/// <summary>
 /// Constructs the builder with the provided capacity. The capacity should be chosen
 /// to limit the number of memory re-allocations that occur within the contained
 /// connection gene list and dictionary as genes are added.
 /// </summary>
 public ConnectionGeneListBuilder(int connectionCapacity)
 {
     _connectionGeneList = new ConnectionGeneList(connectionCapacity);
     _connectionGeneDictionary = new Dictionary<ConnectionEndpointsStruct,ConnectionGene>(connectionCapacity);
     // TODO: Determine better initial capacity.
     _neuronDictionary = new SortedDictionary<uint,NeuronGene>();
 }
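A minimal usage sketch (not part of the original source): the capacity passed in is typically an estimate of how many connection genes will be added, for example the full input-to-output connection count of a substrate. Only the constructor shown above is assumed; the counts below are illustrative.

int inputCount = 10, outputCount = 4;                                    // illustrative counts
int estimatedConnections = inputCount * outputCount;                     // rough upper bound on genes to be added
ConnectionGeneListBuilder builder = new ConnectionGeneListBuilder(estimatedConnections);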
        public NeatGenome(uint genomeId,
                NeuronGeneList neuronGeneList,
                List<ModuleGene> moduleGeneList,
                ConnectionGeneList connectionGeneList,
                int inputNeuronCount,
                int outputNeuronCount)
        {
            this.genomeId = genomeId;

            this.neuronGeneList = neuronGeneList;
            this.moduleGeneList = moduleGeneList;
            this.connectionGeneList = connectionGeneList;

            this.inputNeuronCount = inputNeuronCount;
            this.inputAndBiasNeuronCount = inputNeuronCount + 1;
            this.outputNeuronCount = outputNeuronCount;
            this.inputBiasOutputNeuronCount = inputAndBiasNeuronCount + outputNeuronCount;
            this.inputBiasOutputNeuronCountMinus2 = inputBiasOutputNeuronCount - 2;

            // justin: initialise the multi-objective / novelty-search bookkeeping fields.
            objectives = new double[6];
            nearestNeighbors = 0;
            localGenomeNovelty = 0;
            competition = 0;
            geneticDiversity = 0;
            locality = 0;
            Fitness = 0;
            RealFitness = 0;

            Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
        }
Example #3
        public NeatGenome(long genomeId,
                NeuronGeneList neuronGeneList,
                List<ModuleGene> moduleGeneList,
                ConnectionGeneList connectionGeneList,
                int inputNeuronCount,
                int outputNeuronCount)
        {
            this.genomeId = genomeId;

            this.neuronGeneList = neuronGeneList;
            this.moduleGeneList = moduleGeneList;
            this.connectionGeneList = connectionGeneList;

            this.inputNeuronCount = inputNeuronCount;
            this.inputAndBiasNeuronCount = inputNeuronCount+1;
            this.outputNeuronCount = outputNeuronCount;
            this.inputBiasOutputNeuronCount = inputAndBiasNeuronCount + outputNeuronCount;
            this.inputBiasOutputNeuronCountMinus2 = inputBiasOutputNeuronCount-2;

            //all genomes must be catalogued by the evolution manager
            EvolutionManager.SharedEvolutionManager.GenomeCreated(this);

            // If the supplied list is not already sorted by innovation ID, sort it so that the
            // assertion below holds. (SortByInnovationId() is assumed to be available on ConnectionGeneList.)
            if (!connectionGeneList.IsSorted())
            {
                connectionGeneList.SortByInnovationId();
            }

            Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
        }
Example #4
 // Schrum: Added
 public NeatGenome(uint genomeId,
                 NeuronGeneList neuronGeneList,
                 ConnectionGeneList connectionGeneList,
                 int inputNeuronCount,
                 int outputNeuronCount,
                 int outputsPerPolicy) // Schrum: Added required parameter
     : this(genomeId, neuronGeneList, new List<ModuleGene>(), connectionGeneList, inputNeuronCount, outputNeuronCount, outputsPerPolicy) { }
Example #5
 public NeatGenome(long genomeId,
                 NeuronGeneList neuronGeneList,
                 ConnectionGeneList connectionGeneList,
                 int inputNeuronCount,
                 int outputNeuronCount)
     : this(genomeId, neuronGeneList, new List<ModuleGene>(), connectionGeneList, inputNeuronCount, outputNeuronCount)
 {
 }
Example #6
 // Schrum: Added new constructor that assumes there is only one output module (default NEAT),
 // so total outputs = outputs per policy.
 public NeatGenome(uint genomeId,
                 NeuronGeneList neuronGeneList,
                 ConnectionGeneList connectionGeneList,
                 int inputNeuronCount,
                 int outputNeuronCount)
     : this(genomeId, neuronGeneList, new List<ModuleGene>(), connectionGeneList, inputNeuronCount,
            outputNeuronCount, outputNeuronCount) { }
 /// <summary>
 /// Create a genome with the provided internal state/definition data/objects.
 /// Overridable method to allow alternative NeatGenome sub-classes to be used.
 /// </summary>
 public override NeatGenome CreateGenome(uint id,
                                        uint birthGeneration,
                                        NeuronGeneList neuronGeneList,
                                        ConnectionGeneList connectionGeneList,
                                        int inputNeuronCount,
                                        int outputNeuronCount,
                                        bool rebuildNeuronGeneConnectionInfo)
 {
     return new NeatGenome(this, id, birthGeneration, neuronGeneList, connectionGeneList,
                           inputNeuronCount, outputNeuronCount, rebuildNeuronGeneConnectionInfo, false);
 }
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList = new NeuronGeneList();
            XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                    case NeuronType.Input:
                        inputNeuronCount++;
                        break;
                    case NeuronType.Output:
                        outputNeuronCount++;
                        break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List<ModuleGene> moduleGeneList = new List<ModuleGene>();
            XmlNodeList listModuleGenes = xmlGenome.SelectNodes("modules/module");
            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList = new ConnectionGeneList();
            XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            NeatGenome g = new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            g.Behavior = ReadBehavior(xmlGenome.SelectSingleNode("behavior"));
            g.Behavior.objectives = new double[6];
            g.objectives = new double[6];

            // JUSTIN: Read grid/trajectory info.
            g.GridCoords = ReadGrid(xmlGenome.SelectSingleNode("grid"));
            g.Behavior.trajectory = ReadTrajectory(xmlGenome.SelectSingleNode("trajectory"));

            return g;
        }
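A hedged usage sketch (not from the original source): Read expects the root genome element of a saved genome document. The reader class name (XmlNeatGenomeReaderStatic), the file path, and the "genome" element name are assumptions used only for illustration.

XmlDocument doc = new XmlDocument();
doc.Load("savedGenome.xml");                                             // illustrative path
XmlElement xmlGenome = (XmlElement)doc.SelectSingleNode("genome");       // assumed root element name
NeatGenome genome = XmlNeatGenomeReaderStatic.Read(xmlGenome);           // reader class name assumed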
 public void generateConnections(List <PointF> inputNeuronPositions, List <PointF> outputNeuronPositions,
                                 INetwork genome, float sampleWidth, float sampleThreshold, float neighborLevel,
                                 float increaseResolutionThreshold, float minDistance,
                                 float connectionThreshold,
                                 uint inputCount, uint outputCount,
                                 float minX, float minY, float maxX, float maxY,
                                 ref ConnectionGeneList connections, ref List <PointF> hiddenNeurons)
 {
     generateConnections(inputNeuronPositions, outputNeuronPositions, genome, sampleWidth,
                         sampleThreshold, neighborLevel, increaseResolutionThreshold, minDistance,
                         connectionThreshold, inputCount, outputCount, minX, minY, maxX, maxY, ref connections, ref hiddenNeurons, float.NaN);
 }
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            return(new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount));
        }
Example #11
        public NeatGenome.NeatGenome generatePerceptronCircle(INetwork network, bool distance)
        {
            ConnectionGeneList connections = new ConnectionGeneList((int)(inputCount * outputCount));

            double[] inputs;
            if (distance)//|| angle)
            {
                inputs = new double[5];
            }
            else
            {
                inputs = new double[4];
            }
            double output;
            uint   counter          = 0;
            double inputAngleDelta  = (2 * Math.PI) / inputCount;
            double outputAngleDelta = (2 * Math.PI) / outputCount;

            double angleFrom = -3 * Math.PI / 4;

            for (uint neuronFrom = 0; neuronFrom < inputCount; neuronFrom++, angleFrom += inputAngleDelta)
            {
                inputs[0] = .5 * Math.Cos(angleFrom + (inputAngleDelta / 2.0));
                inputs[1] = .5 * Math.Sin(angleFrom + (inputAngleDelta / 2.0));
                double angleTo = -3 * Math.PI / 4;
                for (uint neuronTo = 0; neuronTo < outputCount; neuronTo++, angleTo += outputAngleDelta)
                {
                    inputs[2] = Math.Cos(angleTo + (outputAngleDelta / 2.0));
                    inputs[3] = Math.Sin(angleTo + (outputAngleDelta / 2.0));
                    //if(angle)
                    //inputs[4] = Math.Abs(angleFrom - angleTo);
                    if (distance)
                    {
                        inputs[4] = ((Math.Sqrt(Math.Pow(inputs[0] - inputs[2], 2) + Math.Pow(inputs[1] - inputs[3], 2)) / (2 * sqrt2)));
                    }
                    network.ClearSignals();
                    network.SetInputSignals(inputs);
                    network.MultipleSteps(5);
                    output = network.GetOutputSignal(0);
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(counter++, neuronFrom, neuronTo + inputCount, weight));
                    }
                }
            }
            NeatGenome.NeatGenome g = new NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
            return(g);
        }
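The same CPPN-output-to-weight mapping recurs throughout these examples: outputs whose magnitude falls below threshold produce no connection, and the remaining magnitude is rescaled onto [0, weightRange] with the sign preserved. A small stand-alone sketch of that mapping (the helper name and sample values are assumptions for illustration, not part of the original source):

static float MapOutputToWeight(double output, double threshold, double weightRange)
{
    // e.g. output = 0.8, threshold = 0.2, weightRange = 3.0  ->  weight = +2.25
    return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
}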
Example #12
        public NeatGenome.NeatGenome generatePerceptronPattern(INetwork network, bool distance)
        {
            ConnectionGeneList connections = new ConnectionGeneList((int)(inputCount * outputCount));

            double[] inputs;
            if (distance)
            {
                inputs = new double[5];
            }
            else
            {
                inputs = new double[4];
            }
            //for this particular config, these inputs will never change so just set them now
            inputs[1] = 1;
            inputs[3] = -1;
            uint   counter = 0;
            double output;
            double x1 = -1, x2 = -1;

            double inputDelta  = (2.0 / (inputCount - 1));
            double outputDelta = (2.0 / (outputCount - 1));

            for (uint nodeFrom = 0; nodeFrom < inputCount; nodeFrom++, x1 += inputDelta)
            {
                inputs[0] = x1;
                x2        = -1;
                for (uint nodeTo = 0; nodeTo < outputCount; nodeTo++, x2 += outputDelta)
                {
                    inputs[2] = x2;
                    if (distance)
                    {
                        inputs[4] = ((Math.Sqrt(Math.Pow(inputs[0] - inputs[2], 2) + Math.Pow(inputs[1] - inputs[3], 2)) / (2 * sqrt2)));
                    }
                    network.ClearSignals();
                    network.SetInputSignals(inputs);
                    //currently assuming a depth no greater than 5
                    network.MultipleSteps(5);
                    output = network.GetOutputSignal(0);
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(counter++, nodeFrom, nodeTo + inputCount, weight));
                    }
                }
            }
            NeatGenome.NeatGenome g = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
            return(g);
        }
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));
            // Schrum: Retrieve this new property, which is saved to xml files now
            int outputsPerPolicy = int.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "outputsperpolicy"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList = new NeuronGeneList();
            XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                    case NeuronType.Input:
                        inputNeuronCount++;
                        break;
                    case NeuronType.Output:
                        outputNeuronCount++;
                        break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List<ModuleGene> moduleGeneList = new List<ModuleGene>();
            XmlNodeList listModuleGenes = xmlGenome.SelectNodes("modules/module");
            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList = new ConnectionGeneList();
            XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            //return new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            // Schrum: Changed to include the outputs per policy
            return new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount, outputsPerPolicy);
        }
        public override NeatGenome generateGenome(INetwork network)
        {
            // copy the neuron list to a new list and update the x/y values
            NeuronGeneList newNeurons = new NeuronGeneList(neurons);

            // set the x and y value of the SUPGs
            foreach (NeuronGene neuron in newNeurons)
            {
                if (neuron.NeuronType == NeuronType.Hidden)
                {
                    // switch to grid substrate configuration
                    neuron.XValue = getXPos2(neuron.InnovationId - 16);
                    neuron.YValue = getYPos2(neuron.InnovationId - 16);
                }
            }

            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

            float[] coordinates = new float[5];
            //float output;
            uint connectionCounter = 0;
            int  iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // connect hidden layer to outputs
            for (uint source = 0; source < hiddenCount; source++)
            {
                coordinates[0] = getXPos(source, false);
                coordinates[1] = getYPos(source, false);

                for (uint target = 0; target < outputCount; target++)
                {
                    // only connect hidden nodes to their single nearest output
                    if (source == target)
                    {
                        coordinates[2] = getXPos(target, true);
                        coordinates[3] = getYPos(target, true);

                        // GWM - fixing weight to 1 for SUPG producing motor outputs
                        connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, target + inputCount, 1));
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount));
        }
Example #15
        /// <summary>
        /// Default constructor.
        /// </summary>
        public NeatGenome(uint genomeId,
            NeuronGeneList neuronGeneList,
            ConnectionGeneList connectionGeneList,
            int inputNeuronCount,
            int outputNeuronCount)
        {
            this.genomeId = genomeId;

            this.neuronGeneList = neuronGeneList;
            this.connectionGeneList = connectionGeneList;

            this.inputNeuronCount = inputNeuronCount;
            this.inputAndBiasNeuronCount = inputNeuronCount+1;
            this.outputNeuronCount = outputNeuronCount;
            this.inputBiasOutputNeuronCount = inputAndBiasNeuronCount + outputNeuronCount;
            this.inputBiasOutputNeuronCountMinus2 = inputBiasOutputNeuronCount-2;

            Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
        }
    void QueryConnection(INetwork network, ConnectionGeneList connections, uint connectionCounter, uint neuron1id, uint neuron2id, NeuronGeneList newNeurons)
    {
        network.ClearSignals();
        //network.SetInputSignal(0, 1);
        network.SetInputSignal(0, newNeurons[(int)neuron1id].XValue);
        network.SetInputSignal(1, newNeurons[(int)neuron1id].YValue);
        network.SetInputSignal(2, newNeurons[(int)neuron2id].XValue);
        network.SetInputSignal(3, newNeurons[(int)neuron2id].YValue);
        network.SetInputSignal(4, 1);
        network.MultipleSteps(10);

        float output = network.GetOutputSignal(0);

        if (Math.Abs(output) > threshold)
        {
            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
            connections.Add(new ConnectionGene(connectionCounter, neuron1id, neuron2id, weight));
        }
    }
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount=0;
            int outputNeuronCount=0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList = new NeuronGeneList();
            XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
            foreach(XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch(neuronGene.NeuronType)
                {
                    case NeuronType.Input:
                        inputNeuronCount++;
                        break;
                    case NeuronType.Output:
                        outputNeuronCount++;
                        break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList = new ConnectionGeneList();
            XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
            foreach(XmlElement xmlConnectionGene in listConnectionGenes)
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));

            return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
        }
 public void normalizeWeightConnections(ref ConnectionGeneList connections, int neuronCount)
 {
     double[] weightSumPos = new double[neuronCount];
     double[] weightSumNeg = new double[neuronCount];
     // Normalize connection weights.
     // Only normalize weights between hidden neurons.
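     // The two passes below make, for each target neuron, the positive incoming weights sum to
     // +3.0 and the negative incoming weights sum to -3.0 (when that sign group is non-empty).
     // Worked example with assumed values: incoming weights {1.0, 3.0, -2.0} on one target become
     // {0.25, 0.75, -1.0} after the per-sign division, then {0.75, 2.25, -3.0} after the *3.0 scaling.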
     for (int i = 0; i < connections.Count; i++)
     {
         if (connections[i].Weight >= 0.0f)
         {
             weightSumPos[connections[i].TargetNeuronId] += Math.Abs(connections[i].Weight); //connections[i].weight; //Abs value?
         }
         else
         {
             weightSumNeg[connections[i].TargetNeuronId] += Math.Abs(connections[i].Weight); //connections[i].weight; //Abs value?
         }
     }
     for (int i = 0; i < connections.Count; i++)
     {
         if (connections[i].Weight >= 0.0f)
         {
             if (weightSumPos[connections[i].TargetNeuronId] != 0.0f)
             {
                 connections[i].Weight /= weightSumPos[connections[i].TargetNeuronId];
             }
         }
         else
         {
             if (weightSumNeg[connections[i].TargetNeuronId] != 0.0f)
             {
                 connections[i].Weight /= weightSumNeg[connections[i].TargetNeuronId];
             }
         }
         connections[i].Weight *= 3.0;
     }
 }
Example #19
        /// <summary>
        /// Copy constructor.
        /// </summary>
        /// <param name="copyFrom"></param>
        public NeatGenome(NeatGenome copyFrom, uint genomeId)
        {
            this.genomeId = genomeId;
            this.parent = copyFrom;

            // No need to loop the arrays to clone each element because NeuronGene and ConnectionGene are
            // value data types (structs).
            neuronGeneList = new NeuronGeneList(copyFrom.neuronGeneList);
            moduleGeneList = new List<ModuleGene>(copyFrom.moduleGeneList);
            connectionGeneList = new ConnectionGeneList(copyFrom.connectionGeneList);

            //joel
            if (copyFrom.Behavior != null)
            {
                Behavior = new SharpNeatLib.BehaviorType(copyFrom.Behavior);
            }

            inputNeuronCount = copyFrom.inputNeuronCount;
            // Schrum: Removed (not used)
            //inputAndBiasNeuronCount = copyFrom.inputNeuronCount+1;
            outputNeuronCount = copyFrom.outputNeuronCount;
            // Schrum: removed (not used)
            //inputBiasOutputNeuronCount = copyFrom.inputBiasOutputNeuronCount;
            //inputBiasOutputNeuronCountMinus2 = copyFrom.inputBiasOutputNeuronCountMinus2;
            // Schrum: Added
            outputsPerPolicy = copyFrom.outputsPerPolicy;

            Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
        }
        public NeatGenome.NeatGenome generateGenomeStackSituationalPolicy(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, float signal)
        {
            // Schrum: For debugging
            //Console.WriteLine("generateGenomeStackSituationalPolicy:signal=" + signal);
            //Console.WriteLine("CPPN inputs = " + network.InputNeuronCount);

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            // Schrum: Too many inputs: Only store those that are needed
            //float[] coordinates = new float[5 + 1]; // <-- Schrum: bit sloppy: frequently results in unused CPPN inputs. Should make more precise
            float[] coordinates = new float[network.InputNeuronCount]; // Schrum: CPPN tracks how many inputs it needs
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            // Schrum: If we are inside this function, then we either have a heterogeneous team
            //         or a single agent (not sure why that ended up being the case; odd use of homogeneousTeam).
            //         Therefore, numberOfAgents tells us whether we need to save space for a Z-coordinate,
            //         and whether we are expecting a Situation input.
            if (numberOfAgents == 1 && coordinates.Length > 4)
            {
                coordinates[4] = signal; // No Z coord, but save situation
            }
            else if (coordinates.Length > 5)
            {
                coordinates[5] = signal; // Both Z coord and situation
            }
            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }
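            // Resulting index layout of the neuron list built above (no separate bias neurons are
            // added in this method):
            //   [0 .. totalInputCount-1]                                  input neurons
            //   [totalInputCount .. totalInputCount+totalOutputCount-1]   output neurons
            //   [totalInputCount+totalOutputCount .. end]                 hidden neurons
            // The sourceID/targetID arithmetic in the loops below assumes exactly this ordering.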

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                // Schrum: Only include Z-coord as input if there are multiple team members
                if (numberOfAgents > 1)
                {
                    coordinates[4] = stackCoordinate; // Schrum: z-coord will always be at index 4
                }
                // Schrum: Debug
                //Console.WriteLine("CPPN inputs (first 4 blank): " + string.Join(",", coordinates));


                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;


                        foreach (PointF source in ng.NeuronPositions)
                        {
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                                  //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                              //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;           //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //--- bias
                                //-----------------Get the bias of the target node
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }
                                //--bias



                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                // Schrum: Debug
                                //Console.WriteLine("CPPN inputs: " + string.Join(",", coordinates));

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                // Schrum: Observation: It seems impossible to use both LEO and adaptive networks because of these hardcoded magic numbers
                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                // Schrum: Observation: In long run, might be desirable to use LEO, but incompatible with special preference neuron output
                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                // Schrum: This is a horrible hack, but it gets the job done for now.
                                // The reason this works is that it makes the following assumptions that could easily be broken in the future:
                                // 1) It is assumed that the only reason a CPPN would have 3 outputs per policy is if the third is for preference links
                                // 2) It is assumed that in a substrate with a preference neuron, the y-coord will always be 0.8, and no other neuron will have
                                //    that y-coord.
                                //Console.WriteLine("output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                //Console.WriteLine("network.OutputsPerPolicy == 3" + (network.OutputsPerPolicy == 3));
                                //Console.WriteLine("target.Y == 0.8" + (target.Y == 0.8f));
                                if (network.OutputsPerPolicy == 3 && target.Y == 0.8f)
                                {
                                    // The output from the link for the preference neuron replaces the standard output.
                                    // Because the link weight is defined by a totally different CPPN output, the preference
                                    // neuron is more free to behave very differently.
                                    output = network.GetOutputSignal(2);
                                    //Console.WriteLine("Preference output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight =
                                            (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) *
                                                    weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network, set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new
                                                        ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref
                                                                       coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: Debugging
            // Looking at the control networks has revealed that the order of details in the substrate
            // description is important. The layer with the preference neuron has to be defined last
            // if it is to be the final neuron in the linearly organized output layer.
            //XmlDocument doc = new XmlDocument();
            //SharpNeatLib.NeatGenome.Xml.XmlGenomeWriterStatic.Write(doc, sng);
            //System.IO.FileInfo oFileInfo = new System.IO.FileInfo("temp.xml");
            //doc.Save(oFileInfo.FullName);

            return(sng);
        }
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            float[] coordinates = new float[4];
            float   output;
            uint    connectionCounter = 0;
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount  = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];


            uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {
                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                                      //Input

                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                    //Output

                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                            case 0: targetID = connectedNG.GlobalID + targetCout; break;

                            case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;

                            case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //calculate bias of target node
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID]    = true;
                            }

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            //network.MultipleSteps(iterations);
                            output = network.GetOutputSignal(0);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return(gn);
        }
        public NeatGenome.NeatGenome generateMultiGenomeModulus(INetwork network, uint numberOfAgents)
        {
            #if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
            #endif
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;

            uint inputsPerAgent = inputCount / numberOfAgents;
            uint hiddenPerAgent = hiddenCount / numberOfAgents;
            uint outputsPerAgent = outputCount / numberOfAgents;
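            // Each agent owns a contiguous slice of the substrate: agent a uses input indices
            // [a*inputsPerAgent .. (a+1)*inputsPerAgent), and analogously for its hidden and output
            // neurons; the id arithmetic in the loops below relies on this partitioning.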

            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount*hiddenCount)+(hiddenCount*outputCount)));

            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;    //x1
            coordinates[1] = -1;                        //y1
            coordinates[2] = -1 + hiddenDelta / 2.0f;   //x2
            coordinates[3] = 0;                         //y2

            for (uint agent = 0; agent < numberOfAgents; agent++)
            {
                coordinates[0] = -1 + (agent * inputsPerAgent * inputDelta) + inputDelta / 2.0f;
                for (uint source = 0; source < inputsPerAgent; source++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + (agent * hiddenPerAgent * hiddenDelta) + hiddenDelta / 2.0f;
                    for (uint target = 0; target < hiddenPerAgent; target++, coordinates[2] += hiddenDelta)
                    {

                        //Since there are an equal number of input and hidden nodes, we check these every time
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                        output = network.GetOutputSignal(0);
            #if OUTPUT
                            foreach (float d in coordinates)
                                sw.Write(d + " ");
                            sw.Write(output);
                            sw.WriteLine();
            #endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, (agent*inputsPerAgent) + source, (agent*hiddenPerAgent) + target + inputCount + outputCount, weight));
                        }

                        //Since every other hidden node has a corresponding output node, we check every other time
                        if (target % 2 == 0)
                        {
                            network.ClearSignals();
                            coordinates[1] = 0;
                            coordinates[3] = 1;
                            network.SetInputSignals(coordinates);
                            ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                            output = network.GetOutputSignal(0);
            #if OUTPUT
                            foreach (float d in coordinates)
                                sw.Write(d + " ");
                            sw.Write(output);
                            sw.WriteLine();
            #endif
                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, (agent*hiddenPerAgent) + source + inputCount + outputCount, ((outputsPerAgent * agent) + ((target) / 2)) + inputCount, weight));
                            }
                            coordinates[1] = -1;
                            coordinates[3] = 0;

                        }
                    }
                }
            }
            #if OUTPUT
            sw.Flush();
            #endif
            //Console.WriteLine(count);
            //Console.ReadLine();
            return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
        }
        // MPS support on the Hive methods only
        #region Generate heterogenous genomes with z-stack

        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //-----------------Get the bias of the source node
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                            }
                            coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);

                            neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
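The weight expression that appears throughout these examples rescales any CPPN output whose magnitude clears the threshold into the range (0, weightRange], keeping the output's sign. A minimal sketch of that mapping as a standalone helper (the method name is illustrative and not part of the original source):

        // Illustrative helper: maps a CPPN output onto a connection weight using the same
        // threshold/weightRange scheme as the examples above. Returns 0 when the output's
        // magnitude does not clear the threshold, i.e. when no connection is expressed.
        private static float CppnOutputToWeight(float output, double threshold, double weightRange)
        {
            if (Math.Abs(output) <= threshold)
                return 0.0f;

            // |output| in (threshold, 1] is mapped linearly onto (0, weightRange],
            // and the sign of the CPPN output becomes the sign of the weight.
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }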
Example #24
        public ESBodyInformation genomeIntoBodyObject(IGenome genome, out bool isEmpty)
        {
            INetwork net = GenomeDecoder.DecodeToModularNetwork((NeatGenome)genome);

            isEmpty = false;

            //we want the genome, so we can acknowledge the genomeID!

            //now convert a network to a set of hidden neurons and connections

            //we'll make body specific function calls later
            var allBodyOutputs       = new List <List <float> >();
            var allBodyInputs        = new List <PointPair>();
            var indexToConnectionMap = new Dictionary <int, int>();

            List <PointF> inputs, outputs, hiddenNeurons;

            inputs        = new List <PointF>();
            outputs       = new List <PointF>();
            hiddenNeurons = new List <PointF>();

            //inputs.Add(new PointF(0,0));

            //int initialDepth, ESIterations;
            //uint inputCount, outputCount;
            //float varianceThreshold, bandThreshold;

            ConnectionGeneList connections = new ConnectionGeneList();


            //loop through a grid, defined by some resolution, and test every pair of query points for a connection using LEO


            int resolution = 9;
            //int resolutionHalf = resolution / 2;

            List <PointF> queryPoints    = gridQueryPoints(resolution);
            float         xDistanceThree = dXDistance(resolution, 3.0f);
            float         yDistanceThree = dYDistance(resolution, 3.0f);


            bool useLeo = true;

            int counter = 0;
            Dictionary <long, PointF> conSourcePoints = new Dictionary <long, PointF>();
            Dictionary <long, PointF> conTargetPoints = new Dictionary <long, PointF>();


            //Dictionary<string, List<PointF>> pointsChecked = new Dictionary<string, List<PointF>>();
            //List<PointF> pList;
            int src, tgt;

            //for each points we have
            for (int p1 = 0; p1 < queryPoints.Count; p1++)
            {
                PointF xyPoint = queryPoints[p1];

                //query against all other points (possibly limiting certain connection lengths)
                for (int p2 = p1; p2 < queryPoints.Count; p2++)
                {
                    PointF otherPoint = queryPoints[p2];

                    if (p1 != p2 && (Math.Abs(xyPoint.X - otherPoint.X) < xDistanceThree && Math.Abs(xyPoint.Y - otherPoint.Y) < yDistanceThree))
                    {
                        //if(!pointsChecked.TryGetValue(xyPoint.ToString(), out pList))
                        //{
                        //    pList = new List<PointF>();
                        //    pointsChecked.Add(xyPoint.ToString(), pList);
                        //}
                        //pList.Add(otherPoint);

                        //if (!pointsChecked.TryGetValue(otherPoint.ToString(), out pList))
                        //{
                        //    pList = new List<PointF>();
                        //    pointsChecked.Add(otherPoint.ToString(), pList);
                        //}
                        //pList.Add(xyPoint);

                        //Console.WriteLine("Checking: ({0}, {1}) => ({2}, {3}) ", xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y);

                        float[] outs   = queryCPPNOutputs((ModularNetwork)net, xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y, maxXDistanceCenter(xyPoint, otherPoint), minYDistanceGround(xyPoint, otherPoint));
                        float   weight = outs[0];

                        allBodyInputs.Add(new PointPair(xyPoint, otherPoint));
                        allBodyOutputs.Add(new List <float>(outs));


                        if (useLeo)
                        {
                            if (outs[1] > 0)
                            {
                                //Console.WriteLine("XY: " + xyPoint + " Other: " + otherPoint + " LEO : " + outs[1]) ;

                                //Console.WriteLine(" XDist: " + sqrt(xDistanceSq(xyPoint, otherPoint))
                                //    + " yDist : " + sqrt(yDistanceSq(xyPoint, otherPoint))
                                //    + " MaxDist: " + maxXDistanceCenter(xyPoint, otherPoint))
                                //+ " MinY: " + minYDistanceGround(xyPoint, otherPoint));
                                //Console.WriteLine();

                                //add to hidden neurons
                                if (!hiddenNeurons.Contains(xyPoint))
                                {
                                    hiddenNeurons.Add(xyPoint);
                                }

                                src = hiddenNeurons.IndexOf(xyPoint);

                                if (!hiddenNeurons.Contains(otherPoint))
                                {
                                    hiddenNeurons.Add(otherPoint);
                                }

                                tgt = hiddenNeurons.IndexOf(otherPoint);

                                conSourcePoints.Add(counter, xyPoint);
                                conTargetPoints.Add(counter, otherPoint);

                                indexToConnectionMap.Add(allBodyOutputs.Count - 1, counter);
                                connections.Add(new ConnectionGene(counter++, (src), (tgt), weight * HyperNEATParameters.weightRange, new float[] { xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y }, outs));
                            }
                        }
                        else
                        {
                            //add to hidden neurons
                            if (!hiddenNeurons.Contains(xyPoint))
                            {
                                hiddenNeurons.Add(xyPoint);
                            }

                            src = hiddenNeurons.IndexOf(xyPoint);

                            if (!hiddenNeurons.Contains(otherPoint))
                            {
                                hiddenNeurons.Add(otherPoint);
                            }

                            tgt = hiddenNeurons.IndexOf(otherPoint);

                            conSourcePoints.Add(counter, xyPoint);
                            conTargetPoints.Add(counter, otherPoint);

                            indexToConnectionMap.Add(allBodyOutputs.Count - 1, counter);
                            connections.Add(new ConnectionGene(counter++, (src), (tgt), weight * HyperNEATParameters.weightRange, new float[] { xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y }, outs));
                        }


                        //PointF newp = new PointF(p.x2, p.y2);

                        //targetIndex = hiddenNeurons.IndexOf(newp);
                        //if (targetIndex == -1)
                        //{
                        //    targetIndex = hiddenNeurons.Count;
                        //    hiddenNeurons.Add(newp);
                        //}
                        //connections.Add(new ConnectionGene(counter++, (sourceIndex), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                    }
                }
            }



            //esSubstrate.generateSubstrate(inputs, outputs, net,
            //    HyperNEATParameters.initialDepth,
            //    (float)HyperNEATParameters.varianceThreshold,
            //     (float)HyperNEATParameters.bandingThreshold,
            //    HyperNEATParameters.ESIterations,
            //     (float)HyperNEATParameters.divisionThreshold,
            //    HyperNEATParameters.maximumDepth,
            //    (uint)inputs.Count, (uint)outputs.Count,
            //    ref connections, ref hiddenNeurons, true);


            //generateSubstrate(List<System.Drawing.PointF> inputNeuronPositions, List<PointF> outputNeuronPositions,
            //INetwork genome, int initialDepth, float varianceThreshold, float bandThreshold, int ESIterations,
            //                                    float divsionThreshold, int maxDepth,
            //                                    uint inputCount, uint outputCount,
            //                                    ref  ConnectionGeneList connections, ref List<PointF> hiddenNeurons)

            //blow out the object, we don't care about testing it

            //foreach (var pPair in pointsChecked)
            //{
            //    Console.WriteLine("Checking: " + pPair.Key + " processed: ");

            //    foreach (var xyPoint in pPair.Value)
            //    {
            //        Console.WriteLine("({0}, {1}) ", xyPoint.X, xyPoint.Y);
            //    }
            //}

            var beforeConn   = connections.Count;
            var beforeNeuron = hiddenNeurons.Count;

            //var hiddenCopy = new List<PointF>(hiddenNeurons);

            ensureSingleConnectedStructure(connections, hiddenNeurons, conSourcePoints, conTargetPoints);

            if (hiddenNeurons.Count > 20 || connections.Count > 100)
            {
                hiddenNeurons = new List <PointF>();
                connections   = new ConnectionGeneList();
            }


            if (hiddenNeurons.Count == 0 || connections.Count == 0)
            {
                isEmpty = true;
            }

            NeatGenome ng = (NeatGenome)genome;

            bool behaviorExists = (ng.Behavior != null);

            ESBodyInformation esbody = new ESBodyInformation()
            {
                AllBodyOutputs    = allBodyOutputs,
                AllBodyInputs     = allBodyInputs,
                indexToConnection = indexToConnectionMap,
                //PreHiddenLocations = hiddenCopy,
                BeforeNeuron     = beforeNeuron,
                BeforeConnection = beforeConn,
                GenomeID         = genome.GenomeId,
                Connections      = connections,
                HiddenLocations  = hiddenNeurons,
                InputLocations   = inputs,
                Objectives       = ng.objectives,
                Fitness          = ng.Fitness,
                Locality         = ng.locality,
                useLEO           = useLeo
            };

            Console.WriteLine(" Nodes: " + hiddenNeurons.Count + " Connections: " + connections.Count);

            return(esbody);
        }
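A brief usage sketch for genomeIntoBodyObject above: decode a genome into a body and skip it when the LEO and size filters left nothing behind (the wrapper method name is illustrative, not from the original source):

        // Illustrative call site (not part of the original source).
        public void PrintBodySummary(IGenome genome)
        {
            bool isEmpty;
            ESBodyInformation body = genomeIntoBodyObject(genome, out isEmpty);

            if (isEmpty)
            {
                // No hidden nodes or no connections survived the filters above.
                return;
            }

            Console.WriteLine("Genome " + body.GenomeID + ": " + body.HiddenLocations.Count
                + " hidden nodes, " + body.Connections.Count + " connections");
        }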
Example #25
        public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            double[]           coordinates = new double[4];
            float              output;
            uint               connectionCounter = 0;
            int                iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            ConnectionGeneList connections       = new ConnectionGeneList();

            if (hiddenCount > 0)
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + hiddenDelta / 2.0f;
                    for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[2] += hiddenDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, hidden + inputCount + outputCount, weight));
                        }
                    }
                }
                coordinates[0] = -1 + hiddenDelta / 2.0f;
                coordinates[1] = 0;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[0] += hiddenDelta)
                {
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, hidden + inputCount + outputCount, outputs + inputCount, weight));
                        }
                    }
                }
            }
            else
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, outputs + inputCount, weight));
                        }
                    }
                }
            }
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
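Each pass of the loops above performs the same four-step query: clear the CPPN's signals, set the substrate coordinates as inputs, relax the network for a fixed number of steps, and read output 0. A minimal sketch of that query factored into a helper (the helper name is illustrative); the iteration budget is the value computed at the top of the method:

        // Illustrative helper (not part of the original source): one substrate query
        // against the CPPN. 'iterations' is the relaxation budget computed above as
        // 2 * (TotalNeuronCount - InputNeuronCount - OutputNeuronCount) + 1.
        private static float QueryCppn(INetwork network, double[] coordinates, int iterations)
        {
            network.ClearSignals();
            network.SetInputSignals(coordinates);
            network.MultipleSteps(iterations);
            return network.GetOutputSignal(0);
        }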
        public override NeatGenome generateGenome(INetwork network)
        {
            var  coordinates               = new double[6];
            int  iterations                = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            uint connectionCounter         = 0;
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));


            for (int layer = -1; layer < 1; layer++)
            {
                coordinates[0] = layer;
                coordinates[3] = layer + 1;
                uint srcRow = 0;
                for (float row1 = -1; row1 <= 1; row1 += 0.5f, srcRow++)
                {
                    coordinates[1] = row1;
                    uint srcCol = 0;
                    for (float col1 = -1; col1 <= 1; col1 += 0.5f, srcCol++)
                    {
                        coordinates[2] = col1;
                        uint tarRow = 0;
                        for (float row2 = -1; row2 <= 1; row2 += 0.5f, tarRow++)
                        {
                            coordinates[4] = row2;
                            uint tarCol = 0;
                            for (float col2 = -1; col2 <= 1; col2 += 0.5f, tarCol++)
                            {
                                coordinates[5] = col2;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.MultipleSteps(iterations);
                                float output = network.GetOutputSignal(0);
                                network.ClearSignals();

                                if (Math.Abs(output) > threshold)
                                {
                                    uint source = srcRow * 5 + srcCol;
                                    if (layer == 0)
                                    {
                                        source += inputCount + outputCount;
                                    }
                                    uint target = tarRow * 5 + tarCol;
                                    if (layer == -1)
                                    {
                                        target += inputCount + outputCount;
                                    }
                                    else
                                    {
                                        target += inputCount;
                                    }

                                    float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                    connections.Add(new ConnectionGene(connectionCounter++, source, target, weight));
                                }
                            }

                            if (row2 == -0.5f)
                            {
                                row2 += 0.5f;
                            }
                        }
                    }

                    if (row1 == -0.5f)
                    {
                        row1 += 0.5f;
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
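In the layered example above, each plane is addressed as row * 5 + col; sources on layer 0 and targets on layer -1 land on the hidden plane and are offset by inputCount + outputCount, while targets on layer 0 are outputs and are offset by inputCount only. A small sketch of that arithmetic as a helper (illustrative, not part of the original source):

        // Illustrative helper (not part of the original source): reproduces the node-ID
        // arithmetic used above for the 5-column substrate planes.
        private uint SubstrateNodeId(uint row, uint col, bool onHiddenPlane, bool isOutput)
        {
            uint id = row * 5 + col;

            if (onHiddenPlane)
            {
                // Hidden nodes follow the input and output blocks in the genome's ID space.
                id += inputCount + outputCount;
            }
            else if (isOutput)
            {
                // Output nodes follow the input block.
                id += inputCount;
            }

            return id;
        }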
Example #27
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            NeatGenome g = new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);

            g.Behavior            = ReadBehavior(xmlGenome.SelectSingleNode("behavior"));
            g.Behavior.objectives = new double[6];
            g.objectives          = new double[6];


            // JUSTIN: Read grid/trajectory info
            g.GridCoords          = ReadGrid(xmlGenome.SelectSingleNode("grid"));
            g.Behavior.trajectory = ReadTrajectory(xmlGenome.SelectSingleNode("trajectory"));

            return(g);
        }
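A hedged usage sketch for the Read method above: load a saved genome from an XML file (the file path parameter and wrapper method are illustrative; only XmlDocument and the Read method shown above are assumed):

        // Illustrative call site (not part of the original source).
        public static NeatGenome LoadGenomeFromFile(string path)
        {
            XmlDocument doc = new XmlDocument();
            doc.Load(path);

            // The root element is passed straight to Read(), which expects the genome's XML element.
            return Read(doc.DocumentElement);
        }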
Example #28
        public override NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            float[]            coordinates = new float[4];
            float              outputR_RGB, outputG_RGB, outputB_RGB, outputHeading_HiddenH, outputRGB_DIR, outputRGB_P, outputHiddenH_DIR, outputHiddenH_P;
            uint               connectionCounter = 0;
            int                iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            ConnectionGeneList connections       = new ConnectionGeneList();

            // Query the CPPN for the connections between the RGB input and hidden layers
            xDelta = 2.0f / (rgbOneDimension);
            yDelta = 2.0f / (rgbOneDimension);

            coordinates[0] = -1 + xDelta / 2.0f;
            coordinates[1] = -1 + yDelta / 2.0f;
            coordinates[2] = -1 + xDelta / 2.0f;
            coordinates[3] = -1 + yDelta / 2.0f;

            for (uint x = 0; x < rgbOneDimension; x++, coordinates[0] += xDelta)
            {
                // Reset the y1 coordinate and then loop through all possible values of y1
                coordinates[1] = -1 + yDelta / 2.0f;
                for (uint y = 0; y < rgbOneDimension; y++, coordinates[1] += yDelta)
                {
                    // Reset the x2 coordinate and then loop through all possible values of x2
                    coordinates[2] = -1 + xDelta / 2.0f;
                    for (uint x2 = 0; x2 < rgbOneDimension; x2++, coordinates[2] += xDelta)
                    {
                        // Reset the y2 coordinate then loop through all possible values of y2
                        coordinates[3] = -1 + yDelta / 2.0f;
                        for (uint y2 = 0; y2 < rgbOneDimension; y2++, coordinates[3] += yDelta)
                        {
                            // Set the CPPN inputs, activate the CPPN, and read the output signals
                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.MultipleSteps(iterations);
                            outputR_RGB = network.GetOutputSignal(0);
                            outputG_RGB = network.GetOutputSignal(1);
                            outputB_RGB = network.GetOutputSignal(2);

                            // Calculate the weight of the R->RGB connection based on the CPPN output
                            if (Math.Abs(outputR_RGB) > threshold)
                            {
                                float weight = (float)(((Math.Abs(outputR_RGB) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputR_RGB));
                                //float weight = 0.0f;
                                connections.Add(new ConnectionGene(connectionCounter++, x + rgbOneDimension * y, (x2 + rgbOneDimension * y2) + inputCount + outputCount, weight));
                                //Console.WriteLine("R Generated connection from " + (x + rgbOneDimension * y) + " to " + ((x2 + rgbOneDimension * y2) + inputCount + outputCount));
                            }

                            // Calculate the weight of the G->RGB connection based on the CPPN output
                            if (Math.Abs(outputG_RGB) > threshold)
                            {
                                float weight = (float)(((Math.Abs(outputG_RGB) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputG_RGB));
                                //float weight = 0.0f;
                                connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + colorArraySize, (x2 + rgbOneDimension * y2) + inputCount + outputCount, weight));
                                //Console.WriteLine("G Generated connection from " + (x + rgbOneDimension * y) + " to " + ((x2 + rgbOneDimension * y2) + inputCount + outputCount));
                            }

                            // Calculate the weight of the B->RGB connection based on the CPPN output
                            if (Math.Abs(outputB_RGB) > threshold)
                            {
                                float weight = (float)(((Math.Abs(outputB_RGB) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputB_RGB));
                                //float weight = 0.0f;
                                connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + 2 * colorArraySize, (x2 + rgbOneDimension * y2) + inputCount + outputCount, weight));
                                //Console.WriteLine("B Generated connection from " + (x + rgbOneDimension * y) + " to " + ((x2 + rgbOneDimension * y2) + inputCount + outputCount));
                            }
                        }
                    }
                }
            }

            // Query the CPPN for the connections between the H input and hidden layers

            /*
             * uint headingX = 3;
             * uint headingY = 3;
             * xDelta = 1.0f;
             * yDelta = 1.0f;
             *
             * coordinates[0] = -1.0f;
             * coordinates[1] = -1.0f;
             * coordinates[2] = -1.0f;
             * coordinates[3] = -1.0f;
             *
             * for (uint x = 0; x < headingX; x++, coordinates[0] += xDelta)
             * {
             *  // Reset the y1 coordinate and then loop through all possible values of y1
             *  coordinates[1] = -1.0f;
             *  for (uint y = 0; y < headingY; y++, coordinates[1] += yDelta)
             *  {
             *      // Reset the x2 coordinate and then loop through all possible values of x2
             *      coordinates[2] = -1.0f;
             *      for (uint x2 = 0; x2 < headingX; x2++, coordinates[2] += xDelta)
             *      {
             *          // Reset the y2 coordinate then loop through all possible values of y2
             *          coordinates[3] = -1.0f;
             *          for (uint y2 = 0; y2 < headingY; y2++, coordinates[3] += yDelta)
             *          {
             *              // Don't query for (0,0) - this substrate does not have a node in the center of the heading plane
             *              if (!((coordinates[0] == 0.0f && coordinates[1] == 0.0f) || (coordinates[2] == 0.0f && coordinates[3] == 0.0f)))
             *              {
             *                  // Set the CPPN inputs, activate the CPPN, and read the output signals
             *                  network.ClearSignals();
             *                  network.SetInputSignals(coordinates);
             *                  network.MultipleSteps(iterations);
             *                  outputHeading_HiddenH = network.GetOutputSignal(3);
             *
             *                  // Calculate the weight of the HD->HDP connection based on the CPPN output
             *                  if (Math.Abs(outputHeading_HiddenH) > threshold)
             *                  {
             *                      float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
             *                      connections.Add(new ConnectionGene(connectionCounter++, 3 * colorArraySize + (x + headingX * y), (x2 + headingX * y2) + inputCount + outputCount + colorArraySize, weight));
             *                      //Console.WriteLine("HD Generated connection from " + (3 * colorArraySize + (x + headingX * y)) + " to " + ((x2 + headingX * y2) + inputCount + outputCount + colorArraySize));
             *                  }
             *              }
             *          }
             *      }
             *  }
             * }*/


            xDelta = 1.0f;
            uint headingX = 3;

            coordinates[0] = -1.0f;
            coordinates[1] = -1.0f;

            // Determine all connections from H row 1
            for (uint src = 300; src < 303; src++, coordinates[0] += xDelta)
            {
                // Reset the x2 coordinate and then loop through all possible values of x2
                coordinates[2] = -1.0f;
                for (uint tgt = 412; tgt < 415; tgt++, coordinates[2] += xDelta)
                {
                    // Query the CPPN for the connections between H row 1 and HDP row 1
                    coordinates[3] = -1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }

                    // Query the CPPN for the connections between H row 1 and HDP row 3
                    coordinates[3] = 1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt + 5, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + (tgt + 5) + " with weight " + weight);
                    }
                }
            }

            coordinates[0] = -1.0f;
            coordinates[1] = 1.0f;

            // Determine all connections from H row 3
            for (uint src = 305; src < 308; src++, coordinates[0] += xDelta)
            {
                // Reset the x2 coordinate and then loop through all possible values of x2
                coordinates[2] = -1.0f;
                for (uint x2 = 0; x2 < headingX; x2++, coordinates[2] += xDelta)
                {
                    // Query the CPPN for the connections between H row 3 and HDP row 1
                    coordinates[3] = -1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, 412 + x2, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + (412 + x2) + " with weight " + weight);
                    }

                    // Query the CPPN for the connections between H row 3 and HDP row 3
                    coordinates[3] = 1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, 417 + x2, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + (417 + x2) + " with weight " + weight);
                    }
                }
            }

            // Determine the connections for the H and HDP middle rows

            // Query the CPPN for the connections between H row 1 and HDP row 2
            coordinates[0] = -1.0f;
            coordinates[1] = -1.0f;
            coordinates[3] = 0.0f;
            for (uint src = 300; src < 303; src++, coordinates[0] += xDelta)
            {
                coordinates[2] = -1.0f;
                for (uint tgt = 415; tgt < 417; tgt++, coordinates[2] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }

            // Query the CPPN for the connections between H row 2 and HDP row 2
            coordinates[0] = -1.0f;
            coordinates[1] = 0.0f;
            coordinates[3] = 0.0f;

            for (uint src = 303; src < 305; src++, coordinates[0] += 2.0f)
            {
                coordinates[2] = -1.0f;
                for (uint tgt = 415; tgt < 417; tgt++, coordinates[2] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }


            // Query the CPPN for the connections between H row 3 and HDP row 2
            coordinates[0] = -1.0f;
            coordinates[1] = 1.0f;
            coordinates[3] = 0.0f;
            for (uint src = 305; src < 308; src++, coordinates[0] += xDelta)
            {
                coordinates[2] = -1.0f;
                for (uint tgt = 415; tgt < 417; tgt++, coordinates[2] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }

            // Query the CPPN for the connections between H row 2 and HDP row 1
            coordinates[1] = 0.0f;
            coordinates[0] = -1.0f;
            for (uint src = 303; src < 305; src++, coordinates[0] += 2.0f)
            {
                coordinates[2] = -1.0f;
                coordinates[3] = -1.0f;
                for (uint tgt = 412; tgt < 415; tgt++, coordinates[2] += xDelta)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }

                // Query the CPPN for the connections between H row 2 and HDP row 3
                coordinates[2] = -1.0f;
                coordinates[3] = 1.0f;
                for (uint tgt = 417; tgt < 420; tgt++, coordinates[2] += xDelta)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }

            // Query the CPPN for the connections between the RGB hidden layer and movement output nodes
            xDelta = 2.0f / (rgbOneDimension);
            yDelta = 2.0f / (rgbOneDimension);

            coordinates[0] = -1 + xDelta / 2.0f;
            coordinates[1] = -1 + yDelta / 2.0f;

            for (uint x = 0; x < rgbOneDimension; x++, coordinates[0] += xDelta)
            {
                // Reset the y1 coordinate and then loop through all possible values of y1
                coordinates[1] = -1 + yDelta / 2.0f;
                for (uint y = 0; y < rgbOneDimension; y++, coordinates[1] += yDelta)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = -1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputRGB_DIR = network.GetOutputSignal(4);

                    // Calculate the weight of the RGB->L connection based on the CPPN output
                    if (Math.Abs(outputRGB_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 308, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 310");
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 0.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputRGB_DIR = network.GetOutputSignal(4);
                    outputRGB_P   = network.GetOutputSignal(5);

                    // Calculate the weight of the RGB->S connection based on the CPPN output
                    if (Math.Abs(outputRGB_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 309, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 311");
                    }

                    // Calculate the weight of the RGB->P connection based on the CPPN output
                    if (Math.Abs(outputRGB_P) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_P) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_P));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 311, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 313");
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputRGB_DIR = network.GetOutputSignal(4);

                    // Calculate the weight of the RGB->R connection based on the CPPN output
                    if (Math.Abs(outputRGB_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 310, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 312");
                    }
                }
            }

            // Query the CPPN for the connections between the Heading hidden and output layers
            xDelta = 1.0f;
            yDelta = 1.0f;

            coordinates[0] = -1.0f;

            // Determine the connections for heading hidden rows 1 and 3
            for (uint src = 412; src < 415; src++, coordinates[0] += xDelta)
            {
                uint srcOffset = 0;
                for (coordinates[1] = -1.0f; coordinates[1] < 3.0f; coordinates[1] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = -1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHiddenH_DIR = network.GetOutputSignal(6);

                    // Calculate the weight of the HDP->L connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 308, weight));
                        //Console.WriteLine("Generated connection from " + (src+srcOffset) + " to 308 with weight " + weight);
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 0.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHiddenH_DIR = network.GetOutputSignal(6);
                    outputHiddenH_P   = network.GetOutputSignal(7);

                    // Calculate the weight of the HDP->S connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 309, weight));
                        //Console.WriteLine("Generated connection from " + (src + srcOffset) + " to 309 with weight " + weight);
                    }

                    // Calculate the weight of the HDP->P connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_P) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_P) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_P));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 311, weight));
                        //Console.WriteLine("Generated connection from " + (src + srcOffset) + " to 311 with weight " + weight);
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHiddenH_DIR = network.GetOutputSignal(6);

                    // Calculate the weight of the HDP->R connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 310, weight));
                        //Console.WriteLine("Generated connection from " + (src + srcOffset) + " to 310 with weight " + weight);
                    }

                    srcOffset += 5;
                }
            }

            coordinates[0] = -1.0f;
            coordinates[1] = 0.0f;

            // Determine the connections for heading hidden row 2
            for (uint src = 415; src < 417; src++, coordinates[0] += 2.0f)
            {
                // Set the CPPN inputs, activate the CPPN, and read the output signal
                network.ClearSignals();
                coordinates[2] = -1.0f;
                coordinates[3] = 0.0f;
                network.SetInputSignals(coordinates);
                network.MultipleSteps(iterations);
                outputHiddenH_DIR = network.GetOutputSignal(6);

                // Calculate the weight of the HDP->L connection based on the CPPN output
                if (Math.Abs(outputHiddenH_DIR) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 308, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 308 with weight " + weight);
                }

                // Set the CPPN inputs, activate the CPPN, and read the output signal
                network.ClearSignals();
                coordinates[2] = 0.0f;
                coordinates[3] = 0.0f;
                network.SetInputSignals(coordinates);
                network.MultipleSteps(iterations);
                outputHiddenH_DIR = network.GetOutputSignal(6);
                outputHiddenH_P   = network.GetOutputSignal(7);

                // Calculate the weight of the HDP->S connection based on the CPPN output
                if (Math.Abs(outputHiddenH_DIR) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 309, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 309 with weight " + weight);
                }

                // Calculate the weight of the HDP->P connection based on the CPPN output
                if (Math.Abs(outputHiddenH_P) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_P) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_P));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 311, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 311 with weight " + weight);
                }

                // Set the CPPN inputs, activate the CPPN, and read the output signal
                network.ClearSignals();
                coordinates[2] = 1.0f;
                coordinates[3] = 0.0f;
                network.SetInputSignals(coordinates);
                network.MultipleSteps(iterations);
                outputHiddenH_DIR = network.GetOutputSignal(6);

                // Calculate the weight of the HDP->R connection based on the CPPN output
                if (Math.Abs(outputHiddenH_DIR) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 310, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 310 with weight " + weight);
                }
            }


            // Return a genome combining the already-specified neurons with the CPPN-generated connections
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
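For orientation, the hard-coded node IDs in the example above appear to decode as follows (inferred from the offsets in the code itself, where the heading inputs start at 3 * colorArraySize = 300, implying colorArraySize = 100 and rgbOneDimension = 10; treat this as a reading aid rather than a specification):

        //   0 .. 299   RGB input planes (three 10x10 planes: R, G and B)
        // 300 .. 307   heading (H) inputs, an 8-node ring with no centre node
        // 308 .. 311   movement outputs (L, S, R and P in the comments above)
        // 312 .. 411   RGB hidden plane (offset by inputCount + outputCount)
        // 412 .. 419   heading hidden plane (HDP), again an 8-node ring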
    public override NeatGenome generateGenome(INetwork network)
    {
        // copy the neuron list to a new list and update the x/y values
        NeuronGeneList newNeurons = new NeuronGeneList(neurons);

        // set the x and y value of the SUPGs
        foreach (NeuronGene neuron in newNeurons)
        {
            /*if (neuron.NeuronType == NeuronType.Hidden)
             * {*/
            // switch to grid substrate configuration
            Point point = GetCustomPos(neuron.InnovationId);
            neuron.XValue = point.X;
            neuron.YValue = point.Y;
            if (neuron.NeuronType != NeuronType.Input)
            {
                neuron.ActivationFunction = new SteepenedSigmoid();
            }

            /*neuron.TimeConstant = 1;
             * neuron.NeuronBias = 0;*/
            /*neuron.XValue = getXPos3(neuron.InnovationId);
            *  neuron.YValue = getYPos3(neuron.InnovationId);*/
            //}
        }

        ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

        float[] coordinates = new float[5];
        float   output;
        uint    connectionCounter = 0;
        int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

        // connect hidden layer to outputs

        /*for (uint source = 0; source < hiddenCount; source++)
         * {
         *  coordinates[0] = getXPos(source, false);
         *  coordinates[1] = getYPos(source, false);
         *
         *  for (uint target = 0; target < outputCount; target++)
         *  {
         *      // only connect hidden nodes to their single nearest output
         *      if (source == target)
         *      {
         *          coordinates[2] = getXPos(target, true);
         *          coordinates[3] = getYPos(target, true);
         *
         *          // GWM - fixing weight to 1 for SUPG producing motor outputs
         *          connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, target + inputCount, 1));
         *      }
         *  }
         * }*/

        // Connections from input to hidden
        for (uint source = 0; source < inputCount; source++)
        {
            //connections.Add(new ConnectionGene(connectionCounter++, source, 6, getActivation(network,source,6,newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, source, 9, getActivation(network, source, 9, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, source, 10, getActivation(network, source, 10, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, source, 11, getActivation(network, source, 11, newNeurons)));
        }
        // Connection from input to output
        connections.Add(new ConnectionGene(connectionCounter++, 0, 6, getActivation(network, 0, 6, newNeurons)));
        connections.Add(new ConnectionGene(connectionCounter++, 3, 6, getActivation(network, 3, 6, newNeurons)));
        // Connections from hidden to hidden
        for (uint source = 0; source < hiddenCount - 2; source++)
        {
            uint tmpSource = source + inputCount + outputCount;
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 12, getActivation(network, tmpSource, 12, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 13, getActivation(network, tmpSource, 13, newNeurons)));
        }
        // Connections from hidden to output
        for (uint source = 0; source < 2; source++)
        {
            uint tmpSource = source + inputCount + outputCount + 3;
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 4, getActivation(network, tmpSource, 4, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 5, getActivation(network, tmpSource, 5, newNeurons)));
        }

        return(new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount));
    }
        public NeatGenome GenerateGenome(INetwork network)
        {
            int maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // TODO:
            maxIterations = Math.Min(maxIterations, 4);

            double epsilon     = 0.0;
            double threshold   = HyperNEATParameters.threshold;
            double weightRange = HyperNEATParameters.weightRange;

            // store constant ids for later references

            uint biasid       = 0u;
            uint inputsStart  = m_useBias ? 1u : 0u;
            uint inputsEnd    = (uint)(inputsStart + m_inputsCount);
            uint outputsStart = inputsEnd;
            uint outputsEnd   = (uint)(outputsStart + m_outputsCount);

            float[]            coordinates = new float[4];
            float              output;
            uint               connectionCounter = 0;
            ConnectionGeneList connections       = new ConnectionGeneList();

            if (m_useBias)
            {
                // we use the bias neuron on the center, and use the
                // 2nd output of the CPPN to compute its weight
                coordinates[0] = 0.0f;
                coordinates[1] = 0.0f;

                // add the bias link to all output neurons
                for (int ni = 0; ni < m_outputsCount; ni++)
                {
                    float tempX, tempY;
                    GetOutputNodeCoordinate(ni, out tempX, out tempY);
                    coordinates[2] = tempX;
                    coordinates[3] = tempY;

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex + 1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, biasid, (uint)(outputsStart + ni), weight));
                    }
                }
            }

            // now add possible connections between all input output neuron pairs

            for (int inpi = 0; inpi < m_inputsCount; inpi++)
            {
                float inpx, inpy;
                GetInputNodeCoordinate(inpi, out inpx, out inpy);
                coordinates[0] = inpx;
                coordinates[1] = inpy;

                for (int outi = 0; outi < m_outputsCount; outi++)
                {
                    float outx, outy;
                    GetOutputNodeCoordinate(outi, out outx, out outy);

                    coordinates[2] = outx;
                    coordinates[3] = outy;

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++,
                                                           (uint)(inputsStart + inpi), (uint)(outputsStart + outi), weight));
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_inputsCount, m_outputsCount));
        }
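        // Hedged sketch, not part of the original source: the CPPN-output-to-weight mapping used
        // above (and again in the substrate generators below), factored into a standalone helper.
        // Assumes 0 <= threshold < 1 and that the CPPN output magnitude is at most 1.
        static float MapCppnOutputToWeight(float output, double threshold, double weightRange)
        {
            if (Math.Abs(output) <= threshold)
                return 0f; // below the expression threshold: no connection is created
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }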
Beispiel #31
        void ensureSingleConnectedStructure(ConnectionGeneList connections, List <PointF> hiddenNeurons, Dictionary <long, PointF> conSourcePoints, Dictionary <long, PointF> conTargetPoints)
        {
            List <List <long> > allChains = new List <List <long> >();
            int maxChain = 0;

            foreach (var con in connections)
            {
                bool        isInChain = false;
                List <long> nChain    = null;
                foreach (List <long> chain in allChains)
                {
                    maxChain = Math.Max(chain.Count, maxChain);

                    if (chain.Contains(con.SourceNeuronId) || chain.Contains(con.TargetNeuronId))
                    {
                        nChain    = chain;
                        isInChain = true;
                        break;
                    }
                }

                if (!isInChain)
                {
                    nChain = new List <long>();
                    allChains.Add(nChain);
                }

                if (!nChain.Contains(con.SourceNeuronId))
                {
                    nChain.Add(con.SourceNeuronId);
                }

                if (!nChain.Contains(con.TargetNeuronId))
                {
                    nChain.Add(con.TargetNeuronId);
                }
            }


            // Re-scan the chain sizes after the loop: the running maxChain above is updated before a
            // chain is extended, so it can lag behind chains grown on the final connection.
            // (Note: a connection that bridges two existing chains is added to the first match only,
            // so this grouping is an approximation of the true connected components.)
            foreach (List <long> chain in allChains)
            {
                maxChain = Math.Max(chain.Count, maxChain);
            }

            List <long> finalChain = allChains.Find(chain => chain.Count == maxChain);

            if (finalChain != null && finalChain.Count != 0)
            {
                List <ConnectionGene> markDelete = new List <ConnectionGene>();

                foreach (var conn in connections)
                {
                    bool delete = false;
                    //if we don't have you in our chain, get rid of the object
                    if (!finalChain.Contains(conn.SourceNeuronId))
                    {
                        hiddenNeurons.Remove(conSourcePoints[conn.InnovationId]);
                        delete = true;
                    }

                    if (!finalChain.Contains(conn.TargetNeuronId))
                    {
                        hiddenNeurons.Remove(conTargetPoints[conn.InnovationId]);
                        delete = true;
                    }

                    if (delete)
                    {
                        markDelete.Add(conn);
                    }
                }
                markDelete.ForEach(x => connections.Remove(x));
                //connections.RemoveAll(x => !(finalChain.Contains(x.SourceNeuronId) || finalChain.Contains(x.TargetNeuronId)));
                //hiddenNeurons.RemoveAll(hn =>
            }

            connections.ForEach(con =>
            {
                //readjust connection source/target depending on hiddenNeuron array
                PointF point       = conSourcePoints[con.InnovationId];
                con.SourceNeuronId = hiddenNeurons.FindIndex(hp => hp.X == point.X && hp.Y == point.Y);

                if (con.SourceNeuronId == -1)
                {
                    Console.WriteLine("Warning: source point not found in hiddenNeurons; con src- " + con.SourceNeuronId + " tgt- " + con.TargetNeuronId);
                }

                point = conTargetPoints[con.InnovationId];
                con.TargetNeuronId = hiddenNeurons.FindIndex(hp => hp.X == point.X && hp.Y == point.Y);

                if (con.TargetNeuronId == -1)
                {
                    Console.WriteLine("Warning: target point not found in hiddenNeurons; con src- " + con.SourceNeuronId + " tgt- " + con.TargetNeuronId);
                }
            });
        }
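        // Hedged sketch, not part of the original source: the chain bookkeeping above only
        // approximates connected components (a connection bridging two existing chains joins the
        // first match). A small union-find over neuron ids, shown here for illustration only,
        // computes the largest connected group exactly. Requires System, System.Collections.Generic
        // and System.Linq; the method name is hypothetical.
        static List<long> LargestConnectedGroup(IEnumerable<ConnectionGene> connections)
        {
            Dictionary<long, long> parent = new Dictionary<long, long>();
            Func<long, long> find = null;
            find = id => parent[id] == id ? id : (parent[id] = find(parent[id])); // root lookup with path compression

            foreach (ConnectionGene con in connections)
            {
                if (!parent.ContainsKey(con.SourceNeuronId)) parent[con.SourceNeuronId] = con.SourceNeuronId;
                if (!parent.ContainsKey(con.TargetNeuronId)) parent[con.TargetNeuronId] = con.TargetNeuronId;
                parent[find(con.SourceNeuronId)] = find(con.TargetNeuronId); // union the two endpoints
            }

            if (parent.Count == 0)
                return new List<long>();

            // group every neuron id by its root and keep the largest group
            return parent.Keys.ToList()
                              .GroupBy(id => find(id))
                              .OrderByDescending(g => g.Count())
                              .First()
                              .ToList();
        }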
Beispiel #32
		private void EnsureMutableConnectionGeneList()
		{
			if(mutableConnectionGeneList!=null)
				return;

			mutableConnectionGeneList = new ConnectionGeneList();

			int bound = connectionGeneList.Count;
			for(int i=0; i<bound; i++)
			{
				ConnectionGene connectionGene = connectionGeneList[i];
				if(!connectionGene.FixedWeight)
					mutableConnectionGeneList.Add(connectionGene);
			}
		}
		/// <summary>
		/// Create a default minimal genome that describes a NN with the given number of inputs and outputs.
		/// </summary>
		/// <returns></returns>
		public static IGenome CreateGenome(NeatParameters neatParameters, IdGenerator idGenerator, int inputNeuronCount, int outputNeuronCount, int outputsPerPolicy, float connectionProportion)
		{
            IActivationFunction actFunct;
			NeuronGene neuronGene; // temp variable.
			NeuronGeneList inputNeuronGeneList = new NeuronGeneList(); // includes bias neuron.
			NeuronGeneList outputNeuronGeneList = new NeuronGeneList();
			NeuronGeneList neuronGeneList = new NeuronGeneList();
			ConnectionGeneList connectionGeneList = new ConnectionGeneList();

			// IMPORTANT NOTE: The neurons must all be created prior to any connections. That way all of the genomes
			// will obtain the same innovation ID's for the bias,input and output nodes in the initial population.
			// Create a single bias neuron.
            //TODO: DAVID proper activation function change to NULL?
            actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
            //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Bias, actFunct);
            neuronGene = new NeuronGene(null, idGenerator.NextInnovationId, NeuronGene.INPUT_LAYER, NeuronType.Bias, actFunct);
			inputNeuronGeneList.Add(neuronGene);
			neuronGeneList.Add(neuronGene);

			// Create input neuron genes.
            actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
			for(int i=0; i<inputNeuronCount; i++)
			{
                //TODO: DAVID proper activation function change to NULL?
                //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Input, actFunct);
                neuronGene = new NeuronGene(null, idGenerator.NextInnovationId, NeuronGene.INPUT_LAYER, NeuronType.Input, actFunct);
				inputNeuronGeneList.Add(neuronGene);
				neuronGeneList.Add(neuronGene);
			}

			// Create output neuron genes. 
            //actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
			for(int i=0; i<outputNeuronCount; i++)
			{
                actFunct = ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid");
                //actFunct = ActivationFunctionFactory.GetRandomActivationFunction(neatParameters);
                //TODO: DAVID proper activation function
                //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Output, actFunct);
                neuronGene = new NeuronGene(null, idGenerator.NextInnovationId, NeuronGene.OUTPUT_LAYER, NeuronType.Output, actFunct);
				outputNeuronGeneList.Add(neuronGene);
				neuronGeneList.Add(neuronGene);
			}

			// Loop over all possible connections from input to output nodes and create a number of connections based upon
			// connectionProportion.
			foreach(NeuronGene targetNeuronGene in outputNeuronGeneList)
			{
				foreach(NeuronGene sourceNeuronGene in inputNeuronGeneList)
				{
					// Always generate an ID even if we aren't going to use it. This is necessary to ensure connections
					// between the same neurons always have the same ID throughout the generated population.
					uint connectionInnovationId = idGenerator.NextInnovationId;

					if(Utilities.NextDouble() < connectionProportion)
					{	// Ok lets create a connection.
						connectionGeneList.Add(	new ConnectionGene(connectionInnovationId, 
							sourceNeuronGene.InnovationId,
							targetNeuronGene.InnovationId,
							(Utilities.NextDouble() * neatParameters.connectionWeightRange ) - neatParameters.connectionWeightRange/2.0));  // Weight 0 +-5
					}
				}
			}

            // Don't create any hidden nodes at this point. Fundamental to the NEAT way is to start minimally!
            // Schrum: Added outputsPerPolicy: If outputsPerPolicy == outputNeuronCount, then behaves like default NEAT
            return new NeatGenome(idGenerator.NextGenomeId, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount, outputsPerPolicy);
		}
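            // Hedged usage sketch (illustrative only; the variable names and parameter values below
            // are assumptions, not taken from the original source): seeding an initial population of
            // minimal genomes with the factory method above.
            //
            //   IdGenerator idGenerator = new IdGenerator();
            //   List<IGenome> population = new List<IGenome>();
            //   for (int i = 0; i < populationSize; i++)
            //   {
            //       population.Add(CreateGenome(neatParameters, idGenerator,
            //                                   inputNeuronCount: 4, outputNeuronCount: 2,
            //                                   outputsPerPolicy: 2, connectionProportion: 0.5f));
            //   }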
Beispiel #34
 // Schrum: Added this intermediate constructor to lead to my modified one below
 public NeatGenome(uint genomeId,
                 NeuronGeneList neuronGeneList,
                 List<ModuleGene> moduleGeneList,
                 ConnectionGeneList connectionGeneList,
                 int inputNeuronCount,
                 int outputNeuronCount)
     : this(genomeId, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount, outputNeuronCount) { }
        // NOTE: Multi-Plane Substrates may be supported by this method (unverified).
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool  adaptiveNetwork,bool  modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
            float[] coordinates = new float[4]; // CPPN inputs: [srcX, srcY, tgtX, tgtY] (JUSTIN: already back to 4 coordinates)
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            // CPPN Outputs: [ Weights ] [ Biases ]
            // When using multi-plane substrates, there will be multiple Weight and Bias outputs.
            // There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
            // There is a Bias output for every plane
            // Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
            int numPlanes = planes.Count;
            int numPlaneConnections = planesConnected.Count;
            int computedIndex;

            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {

                        //-----------------Get the bias of the source node
                        /*switch (ng.GroupType)
                        {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                        }
                        coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RecursiveActivation();//network.MultipleSteps(iterations);

                        neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                        //*///----------------------------

                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                                case 0: targetID = connectedNG.GlobalID + targetCout; break;
                                case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
                                case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //-----------------Get the bias of the target node
                            coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                            //coordinates[4] = 0.0f; coordinates[5] = 0.0f; //JUSTIN: REMOVE THIS!!!
                            //String s = arrayToString(coordinates);
                            //if (weights.ContainsKey(s))
                            //    neurons[(int)targetID].Bias = weights[s];
                            //else
                            {
                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);
                                computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                //weights.Add(s,neurons[(int)targetID].Bias);
                            }
                            //----------------------------

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;
                            //coordinates[4] = source.X - target.X; coordinates[5] = source.Y - target.Y; //JUSTIN: REMOVE THIS!!!

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);
                            computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                            //output = network.GetOutputSignal(0);
                            output = network.GetOutputSignal(computedIndex);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return gn;
        }
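        // Hedged summary sketch (illustrative; this restates the comment block inside the method above
        // and is not additional source code): for multi-plane substrates the CPPN output vector is
        // laid out as [ weights ][ biases ], so the indices used above are
        //
        //   weight output index = indexOfPlaneConnection(sourcePlane, targetPlane)     // 0 .. planesConnected.Count-1
        //   bias   output index = planesConnected.Count + planes.IndexOf(targetPlane)  // one bias output per plane
        //
        // A single-plane ("regular") substrate therefore reads its weight from output 0 and its bias from output 1.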
Beispiel #36
        //public static

        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }


            // Read behavior list
            NeatGenome g = new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);

            XmlNode behaviorNode = xmlGenome.SelectSingleNode("behavior");

            if (behaviorNode != null)
            {
                g.Behavior            = ReadBehavior(behaviorNode); //TODO bug is here
                g.Behavior.objectives = new double[6];
                g.objectives          = new double[6];
            }



            /*
             * XmlNode behaviorNode = xmlGenome.SelectSingleNode("behavior");
             * if (behaviorNode != null)
             * {
             *  g.Behavior = ReadBehavior(behaviorNode.SelectSingleNode("list")); //TODO bug is here
             *  g.Behavior.objectives = new double[6];
             *  g.objectives = new double[6];
             * }
             */



            return(g);
        }
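        // Hedged usage sketch (illustrative; the file name, the root element name, and calling Read
        // directly from within the hosting reader class are assumptions): loading a saved genome
        // back from XML with the method above.
        //
        //   XmlDocument doc = new XmlDocument();
        //   doc.Load("genome.xml");
        //   XmlElement xmlGenome = (XmlElement)doc.SelectSingleNode("genome");
        //   NeatGenome genome = Read(xmlGenome);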
        // NOTE: Multi-Plane Substrates ARE supported by this method!
        private NeatGenome.NeatGenome generateHiveBrainGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet,bool ct)
        {
            //bool relativeCoordinate = false;
            bool oneWay = false;
            bool homogeneous = false;
            Dictionary<String, float> weights = new Dictionary<String, float>();
            float timeConstantMin = 0.1f;
            float timeConstantMax = 2.0f;

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount))); // TODO: Perhaps get an exact count of connections in the constructor and use that value here?
            float[] coordinates = new float[5]; //JUSTIN: Used to be 6 coordinates; zstack was duplicated for the relative-coordinate hijinks. Fixed it. // Inputs to the CPPN: [srcX, srcY, tgX, tgY, zstack]
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            // CPPN Outputs: [ Weights ] [ Biases ]
            // When using multi-plane substrates, there will be multiple Weight and Bias outputs.
            // There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
            // There is a Bias output for every plane
            // Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
            int numPlanes = planes.Count;
            int numPlaneConnections = planesConnected.Count;
            int computedIndex;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                //coordinates[4] = homogeneous ? 0 : stackCoordinate;//-1 ? -1 : 0;//0;//stackCoordinate;
                //coordinates[5] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        /*if (!relativeCoordinate)
                            coordinates[5] = stackCoordinate;
                        else //USE RELATIVE
                            coordinates[5] = 0;//*/

                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //-----------------Get the bias of the source node
                           /* switch (ng.GroupType)
                            {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                            }
                            coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);

                            neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                            if (ct)
                            {
                                neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
                                System.Diagnostics.Debug.Assert(neurons[(int)sourceID].TimeConstant > 0);
                            }*/
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //-----------------Get the bias of the target node
                                   coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                                   //String s = arrayToString(coordinates);
                                   //if (weights.ContainsKey(s))
                                   //    neurons[(int)targetID].Bias = weights[s];
                                   //else
                                   {
                                       network.ClearSignals();
                                       network.SetInputSignals(coordinates);
                                       network.RecursiveActivation();//network.MultipleSteps(iterations);
                                       computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                       //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                       neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                       //weights.Add(s,neurons[(int)targetID].Bias);
                                   }
                                   if (ct)
                                   {
                                       neurons[(int)targetID].TimeConstant = timeConstantMin + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * (timeConstantMax - timeConstantMin));
                                       System.Diagnostics.Debug.Assert(neurons[(int)targetID].TimeConstant > 0);
                                   }
                                //----------------------------

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;
                                //Console.WriteLine(arrayToString(coordinates));
                                
                                //if(weights.ContainsKey(s))
                                //    output = weights[s];
                                //else
                                {
                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    network.RecursiveActivation();//network.MultipleSteps(iterations);
                                    computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                                    //output = network.GetOutputSignal(0);
                                    output = network.GetOutputSignal(computedIndex);
                                    //weights.Add(s, output);
                                }
                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }

                    foreach (uint connectedTo in ng.HiveConnectedTo)
                    {
                        bool wrapAround = true;

                        for (uint agentConnect = 0; agentConnect < stackCoordinates.Count; agentConnect++)
                        {
                            //Make sure we're not making a recurrent connection on the same agent
                            //if (agentConnect == agent)
                            //    continue;
                           // else if ((agent == stackCoordinates.Count - 1 && agentConnect == 0) || (agent == 0 && agentConnect == stackCoordinates.Count - 1))
                           //     ;//agentConnect = 0;
                            if (agent != 0 && agent != stackCoordinates.Count - 1)
                                continue;

                            //if (agent == 1)
                            //    continue;

                            //if (agentConnect != 0 )
                            //    continue;

                            //Limits connections to only neighbors.  Good?
                            //if (!((agent == 0 || agentConnect >= agent - 1) && agentConnect <= agent + 1))
                            //    continue;
                            //if (agentConnect > agent + 1 || agentConnect < agent - 1)
                            //    continue;

                            if (oneWay)
                            {
                                //ONE-WAY
                                if (agentConnect > agent + 1 || agentConnect < agent)
                                    continue;
                            }

                            /*if (!relativeCoordinate)
                                //USE THE Z COORDINATE
                                coordinates[5] = stackCoordinates[(int)agentConnect];
                            else
                                //USE THE RELATIVE COORDINATE
                                coordinates[5] = agentConnect > agent ? 1 : -1;
                            //*/
                            //WRAP AROUND
                            /*if (agent == stackCoordinates.Count - 1 && agentConnect == 0)
                                coordinates[5] = 1;
                            else if (agent == 0 && agentConnect == stackCoordinates.Count - 1)
                                coordinates[5] = -1;
                             */

                            connectedNG = getNeuronGroup(connectedTo);

                            sourceCount = 0;
                            foreach (PointF source in ng.NeuronPositions)
                            {

                                //-----------------Get the bias of the source node
                               /* switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }
                                coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);

                                neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                if (ct)
                                {
                                    neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
                                    System.Diagnostics.Debug.Assert(neurons[(int)sourceID].TimeConstant > 0);
                                }*/
                                //----------------------------

                                targetCout = 0;
                                foreach (PointF target in connectedNG.NeuronPositions)
                                {
                                    /*if ((source.X != target.X))
                                    {
                                        targetCout++;
                                        continue;
                                    }*/
                                    if (/*source.X!= target.X ||*/ target.X != coordinates[4])// || source.X!= coordinates[4])
                                    {
                                        targetCout++;
                                        continue;
                                    }
                                   /* if (agent != 0 && agent != stackCoordinates.Count - 1)
                                    { 
                                        if(agentConnect != 0 && agentConnect != stackCoordinates.Count - 1)
                                        {
                                            targetCout++;
                                            continue;
                                        }
                                    }*/
                                    switch (ng.GroupType)
                                    {
                                        case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                        case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                        case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                    }

                                    switch (connectedNG.GroupType)
                                    {
                                        case 0: targetID = (agentConnect * InputCount) + connectedNG.GlobalID + targetCout; break;
                                        case 1: targetID = totalInputCount + (agentConnect * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                        case 2: targetID = totalInputCount + totalOutputCount + (agentConnect * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                    }

                                    //-----------------Get the bias of the target node
                                    coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                                    //String s = arrayToString(coordinates);
                                    //if (weights.ContainsKey(s))
                                    //    neurons[(int)targetID].Bias = weights[s];
                                    //else
                                    {
                                        network.ClearSignals();
                                        network.SetInputSignals(coordinates);
                                        network.RecursiveActivation();//network.MultipleSteps(iterations);
                                        computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                        //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                        neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                       // weights.Add(s, neurons[(int)targetID].Bias);
                                    }
                                    if (ct)
                                    {
                                        neurons[(int)targetID].TimeConstant = timeConstantMin + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * (timeConstantMax - timeConstantMin));
                                        System.Diagnostics.Debug.Assert(neurons[(int)targetID].TimeConstant > 0);
                                    }
                                    //----------------------------

                                    coordinates[0] = source.X;
                                    coordinates[1] = source.Y;
                                    coordinates[2] = target.X;
                                    coordinates[3] = target.Y;
                                    //s = arrayToString(coordinates);
                                    //if (weights.ContainsKey(s))
                                    //    output = weights[s];
                                    //else
                                    {
                                        network.ClearSignals();
                                        network.SetInputSignals(coordinates);
                                        network.RecursiveActivation();//network.MultipleSteps(iterations);
                                        computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                                        //output = network.GetOutputSignal(0);
                                        output = network.GetOutputSignal(computedIndex);
                                      //  weights.Add(s, output);
                                    }

                                    double leo = 0.0;

                                    if (adaptiveNetwork)
                                    {
                                        A = network.GetOutputSignal(2);
                                        B = network.GetOutputSignal(3);
                                        C = network.GetOutputSignal(4);
                                        D = network.GetOutputSignal(5);
                                        learningRate = network.GetOutputSignal(6);
                                    }

                                    if (modulatoryNet)
                                    {
                                        modConnection = network.GetOutputSignal(7);
                                    }
                                    else
                                    {
                                        modConnection = 0.0f;
                                    }

                                    if (useLeo)
                                    {
                                        threshold = 0.0;
                                        leo = network.GetOutputSignal(2);
                                    }

                                    if (!useLeo || leo > 0.0)
                                        if (Math.Abs(output) > threshold)
                                        {
                                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                            //if (adaptiveNetwork)
                                            //{
                                            //    //If adaptive network set weight to small value
                                            //    weight = 0.1f;
                                            //}
                                            connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, true));
                                        }
                                    //else
                                    //{
                                    //    Console.WriteLine("Not connected");
                                    //}
                                    targetCout++;
                                }
                                sourceCount++;
                            }
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
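        // Hedged sketch (illustrative; not part of the original source): a typical way to build the
        // stackCoordinates argument used by the stack-based generators above and below, spacing the
        // agents evenly over [-1, 1] on the z axis fed to the CPPN as coordinates[4].
        //
        //   List<float> stackCoordinates = new List<float>();
        //   for (int a = 0; a < numberOfAgents; a++)
        //       stackCoordinates.Add(numberOfAgents == 1 ? 0f : -1f + a * (2.0f / (numberOfAgents - 1)));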
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, bool dirComm)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount) +
                numberOfAgents * (ReceiveCount * HiddenCount) + numberOfAgents * (HiddenCount * TransCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;
            uint totalTransCount = TransCount * numberOfAgents;
            uint totalReceiveCount = ReceiveCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden|receive|transmit
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents + ReceiveCount * numberOfAgents + TransCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }
            // set up the receive nodes
            for (uint a = 0; a < totalReceiveCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents, NeuronType.Receive, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the transmit nodes
            for (uint a = 0; a < totalTransCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents + ReceiveCount * numberOfAgents, NeuronType.Transmit, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount + totalReceiveCount + totalTransCount];

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                    case 3: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + ng.GlobalID + sourceCount; break; //Receive
                                    case 4: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount; break; //Transmit
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                    case 3: targetID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + connectedNG.GlobalID + targetCout; break;
                                    case 4: targetID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID] = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
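                                        // Rescale |output| from (threshold, 1] linearly onto (0, weightRange], keeping the sign of the CPPN output.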
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            //Add Direct Communication connections
            if (dirComm)
            {
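                // Direct communication wiring: each agent's transmit neurons are linked, with a fixed
                // weight of 1.0, to the receive neurons of its (ReceiveCount / TransCount) nearest
                // neighbours in stack-coordinate space.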
                uint numConnected = ReceiveCount / TransCount;
                agent = 0;

                foreach (float stackCoordinate in stackCoordinates)
                {
                    SortedList<float, uint> closestAgents = new SortedList<float, uint>();
                    uint i = 0;
                    foreach (float otherCoordinate in stackCoordinates)
                    {
                        if (i == agent) { i++; continue; } // skip the agent itself, but keep the index advancing
                        float delta = Math.Abs(stackCoordinate - otherCoordinate);
                        closestAgents.Add(delta, i);
                        i++;
                    }
                    uint[] orderedAgents = new uint[numberOfAgents];
                    closestAgents.Values.CopyTo(orderedAgents, 0);
                    uint[] connectedAgents = new uint[numConnected];
                    for (uint j = 0; j < numConnected; j++)
                    {
                        connectedAgents[j] = orderedAgents[j];
                    }

                    foreach (NeuronGroup ng in neuronGroups)
                    {
                        if (ng.GroupType != 4) continue;
                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            uint sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount;

                            foreach (uint connectedAgent in connectedAgents)
                            {
                                uint targetID = totalInputCount + totalOutputCount + totalHiddenCount + (connectedAgent * ReceiveCount) + (ng.GlobalID * numConnected) + agent;

                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, 1.0));
                            }

                            sourceCount++;
                        }
                    }

                    agent++;
                }

            }

            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
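The weight expression that recurs throughout these substrate builders maps a CPPN output whose magnitude exceeds the threshold linearly onto (0, weightRange], keeping its sign; outputs at or below the threshold produce no connection. A minimal, self-contained sketch of that mapping (the class and method names here are invented for illustration and are not part of SharpNeatLib):

        using System;

        static class CppnWeightMapping
        {
            // |output| <= threshold       -> no connection (null).
            // |output| in (threshold, 1]  -> weight in (0, weightRange], sign preserved.
            public static float? MapOutputToWeight(float output, double threshold, double weightRange)
            {
                if (Math.Abs(output) <= threshold)
                    return null;
                return (float)(((Math.Abs(output) - threshold) / (1.0 - threshold)) * weightRange * Math.Sign(output));
            }
        }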
        public override NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            #if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
            #endif
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;
            coordinates[1] = -1;
            coordinates[2] = -1 + hiddenDelta / 2.0f;
            coordinates[3] = 0;

            for (uint source = 0; source < inputCount; source++, coordinates[0] += inputDelta)
            {
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                for (uint target = 0; target < hiddenCount; target++, coordinates[2] += hiddenDelta)
                {

                    // Since there are an equal number of input and hidden nodes, we check these every time.
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    output = network.GetOutputSignal(0);
            #if OUTPUT
                            foreach (double d in inputs)
                                sw.Write(d + " ");
                            sw.Write(output);
                            sw.WriteLine();
            #endif
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, source, target + inputCount + outputCount, weight));
                    }

                    //Since every other hidden node has a corresponding output node, we check every other time
                    if (target % 2 == 0)
                    {
                        network.ClearSignals();
                        coordinates[1] = 0;
                        coordinates[3] = 1;
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);
            #if OUTPUT
                            foreach (double d in inputs)
                                sw.Write(d + " ");
                            sw.Write(output);
                            sw.WriteLine();
            #endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, (target / 2) + inputCount, weight));
                        }
                        coordinates[1] = -1;
                        coordinates[3] = 0;

                    }
                }
            }
            #if OUTPUT
            sw.Flush();
            #endif
            return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
        }
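The coordinate stepping above (start at -1 + delta/2, advance by delta) lays n nodes out at the centres of n equal-width cells spanning [-1, 1], assuming inputDelta and hiddenDelta are defined as 2 divided by the respective node counts (those definitions sit outside this excerpt). A small illustrative sketch of that layout:

        using System;

        static class SubstrateLayout
        {
            // Returns n evenly spaced positions centred in [-1, 1]:
            // position[i] = -1 + delta * (i + 0.5), with delta = 2 / n.
            public static float[] CellCentres(int n)
            {
                float delta = 2.0f / n;
                float[] positions = new float[n];
                for (int i = 0; i < n; i++)
                    positions[i] = -1.0f + delta * (i + 0.5f);
                return positions;
            }
        }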
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {

                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                                case 0: targetID = connectedNG.GlobalID + targetCout; break;
                                case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
                                case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //calculate bias of target node
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID] = true;
                            }

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            //network.MultipleSteps(iterations);
                            output = network.GetOutputSignal(0);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return gn;
        }
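The bias handling in the method above queries the CPPN once per target node with the source coordinates pinned to (0, 0), reads the second output, scales it by weightRange, and uses a bool[] so a target revisited from several sources is only queried once. A stripped-down sketch of that pattern, with the CPPN reduced to a delegate (illustrative only, not the SharpNeatLib API):

        using System;

        static class BiasQueryExample
        {
            // cppn(x1, y1, x2, y2) stands in for querying the CPPN and reading its bias output.
            public static float[] ComputeBiases(Func<float, float, float, float, float> cppn,
                                                (float X, float Y)[] targets, double weightRange)
            {
                float[] bias = new float[targets.Length];
                bool[] biasCalculated = new bool[targets.Length]; // in the generator this flag matters because
                                                                  // targets are revisited for every source
                for (int t = 0; t < targets.Length; t++)
                {
                    if (biasCalculated[t]) continue;
                    float raw = cppn(0.0f, 0.0f, targets[t].X, targets[t].Y); // source pinned to (0,0)
                    bias[t] = (float)(raw * weightRange);
                    biasCalculated[t] = true;
                }
                return bias;
            }
        }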
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            List <PointF> hiddenNeuronPositions = new List <PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList();//(int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            List <PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List <PointF> inputNeuronPositions  = getNeuronGroupByType(0);


            EvolvableSubstrate se = new EvolvableSubstrate();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                                   HyperNEATParameters.initialRes,
                                   (float)HyperNEATParameters.varianceThreshold,
                                   (float)HyperNEATParameters.bandingThreshold,
                                   (int)HyperNEATParameters.ESIterations,
                                   (float)HyperNEATParameters.divisionThreshold,
                                   HyperNEATParameters.maximumRes,
                                   InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            float[] coordinates       = new float[5];
            uint    connectionCounter = (uint)connections.Count;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[]      visited  = new bool[neurons.Count];
            List <uint> nodeList = new List <uint>();

            bool[] connectedToInput = new bool[neurons.Count];

            bool[] isOutput = new bool[neurons.Count];

            bool danglingConnection = true;
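            // Iteratively prune dangling connections: drop any link whose source receives no signal or
            // whose target feeds nothing, and repeat until a fixpoint is reached. Inputs always count
            // as having incoming signals and outputs as having outgoing ones, so they never trigger pruning.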

            while (danglingConnection)
            {
                bool[] hasIncomming = new bool[neurons.Count];

                foreach (ConnectionGene co in connections)
                {
                    //  if (co.SourceNeuronId != co.TargetNeuronId)
                    // {
                    hasIncomming[co.TargetNeuronId] = true;
                    // }
                }
                for (int i = 0; i < InputCount; i++)
                {
                    hasIncomming[i] = true;
                }

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    //  if (co.TargetNeuronId != co.SourceNeuronId)
                    //  {
                    if (co.TargetNeuronId != co.SourceNeuronId)  // neurons that only connect to themselves don't count
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                    //  }
                }

                // Treat output neurons as having outgoing connections so that links into them are kept.
                for (int i = 0; i < OutputCount; i++)
                {
                    hasOutgoing[i + InputCount] = true;
                }


                danglingConnection = false;
                //Check if there are still dangling connections
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncomming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return(!hasIncomming[m.SourceNeuronId]); });
                connections.RemoveAll(delegate(ConnectionGene m) { return(!hasOutgoing[m.TargetNeuronId]); });
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));
            //     SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;

            return(gn);
        }
Example #42
        public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            int    maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            double epsilon       = 0.0;

            uint firstBias   = 0;
            uint lastBias    = biasCount;
            uint firstInput  = biasCount;
            uint lastInput   = biasCount + inputCount;
            uint firstOutput = biasCount + inputCount;
            uint lastOutput  = biasCount + inputCount + outputCount;
            uint firstHidden = biasCount + inputCount + outputCount;
            uint lastHidden  = biasCount + inputCount + outputCount + hiddenCount;

            float[]            coordinates = new float[4];
            float              output;
            uint               connectionCounter = 0;
            ConnectionGeneList connections       = new ConnectionGeneList();

            // give bias inputs to all hidden and output nodes.
            // The source of the link is located at (0,0), the target is each node, and the weight of the link is the second output of the CPPN.
            coordinates[0] = 0;
            coordinates[1] = 0;
            for (uint bias = firstBias; bias < lastBias; bias++)
            {
                // link the bias to all hidden nodes.
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint hidden = firstHidden; hidden < lastHidden; hidden++)
                {
                    coordinates[2] += hiddenDelta;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, bias, hidden, weight));
                    }
                }

                // link the bias to all output nodes.
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint outp = firstOutput; outp < lastOutput; outp++)
                {
                    coordinates[2] += outputDelta;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, bias, outp, weight));
                    }
                }
            }

            if (hiddenCount > 0)
            {
                // link all input nodes to all hidden nodes.
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint input = firstInput; input < lastInput; input++)
                {
                    coordinates[0] += inputDelta;
                    coordinates[2]  = -1 + hiddenDelta / 2.0f;
                    for (uint hidden = firstHidden; hidden < lastHidden; hidden++)
                    {
                        coordinates[2] += hiddenDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, hidden, weight));
                        }
                    }
                }

                // link all hidden nodes to all output nodes.
                coordinates[0] = -1 + hiddenDelta / 2.0f;
                coordinates[1] = 0;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint hidden = firstHidden; hidden < lastHidden; hidden++)
                {
                    coordinates[0] += hiddenDelta;
                    coordinates[2]  = -1 + outputDelta / 2.0f;
                    for (uint outp = firstOutput; outp < lastOutput; outp++)
                    {
                        coordinates[2] += outputDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, hidden, outp, weight));
                        }
                    }
                }
            }
            else
            {
                // There are no hidden nodes, so link all input nodes directly to all output nodes.
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint input = firstInput; input < lastInput; input++)
                {
                    coordinates[0] += inputDelta;
                    coordinates[2]  = -1 + outputDelta / 2.0f;
                    for (uint outp = firstOutput; outp < lastOutput; outp++)
                    {
                        coordinates[2] += outputDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, outp, weight));
                        }
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            float[] coordinates = new float[5];
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            // Schrum: debugging

            /*
             * Console.WriteLine("generateMultiGenomeStack");
             * Console.WriteLine("numberOfAgents:" + numberOfAgents);
             * Console.WriteLine("totalOutputCount:" + totalOutputCount);
             * Console.WriteLine("totalInputCount:" + totalInputCount);
             */

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                       //Input

                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                    //Output

                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: debugging
            //Console.WriteLine("sng.InputNeuronCount:" + sng.InputNeuronCount);
            //Console.WriteLine("sng.OutputNeuronCount:" + sng.OutputNeuronCount);

            return(sng);
        }
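When useLeo is enabled, the builders above hand the expression decision to a dedicated CPPN output: threshold is forced to 0 and a link is only created if the LEO signal is positive, after which the usual magnitude-to-weight mapping applies. A compact sketch of that gate (helper names invented for this illustration):

        using System;

        static class LeoGateExample
        {
            // Returns the expressed weight, or null when the link should not exist.
            public static float? ExpressLink(float weightOutput, float leoOutput, bool useLeo,
                                             double threshold, double weightRange)
            {
                if (useLeo)
                {
                    if (leoOutput <= 0.0f) return null;   // LEO gate closed: no connection
                    threshold = 0.0;                      // with LEO, any non-zero magnitude may be expressed
                }
                if (Math.Abs(weightOutput) <= threshold) return null;
                return (float)(((Math.Abs(weightOutput) - threshold) / (1.0 - threshold))
                               * weightRange * Math.Sign(weightOutput));
            }
        }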
        public override SharpNeatLib.NeatGenome.NeatGenome GenerateGenome(INetwork network)
        {
            int maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // TODO:
            maxIterations = Math.Min(maxIterations, 4);

            double epsilon     = 0.0;
            double threshold   = HyperNEATParameters.threshold;
            double weightRange = HyperNEATParameters.weightRange;

            // Store constant ids for later reference.

            uint biasid       = 0u;
            uint inputsStart  = m_useBias ? 1u : 0u;
            uint inputsEnd    = (uint)(inputsStart + (m_rows * m_cols));
            uint outputsStart = inputsEnd;
            uint outputsEnd   = (uint)(outputsStart + (m_rows * m_cols));

            float[] coordinates = new float[8];

            coordinates[2] = coordinates[6] = m_homex;
            coordinates[3] = coordinates[7] = m_homey;

            float output;
            uint  connectionCounter        = 0;
            ConnectionGeneList connections = new ConnectionGeneList();

            if (m_useBias)
            {
                // We place the bias neuron at the center and use the
                // 2nd output of the CPPN to compute each bias weight.
                coordinates[0] = 0.0f;
                coordinates[1] = 0.0f;

                // add the bias link to all output neurons
                for (int ni = 0, ncount = m_rows * m_cols; ni < ncount; ni++)
                {
                    int row = (ni / m_cols);
                    int col = (ni % m_cols);

                    coordinates[4] = (-1.0f) + (m_colDelta * (col + 0.5f));
                    coordinates[5] = (-1.0f) + (m_rowDelta * (row + 0.5f));

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex + 1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, biasid, (uint)(outputsStart + ni), weight));
                    }
                }
            }

            // Now add possible connections between all input/output neuron pairs.

            for (int inpi = 0, inpcount = m_rows * m_cols; inpi < inpcount; inpi++)
            {
                for (int outi = 0, outcount = m_rows * m_cols; outi < outcount; outi++)
                {
                    int inrow = (inpi / m_cols); int incol = (inpi % m_cols);
                    int outrow = (outi / m_cols); int outcol = (outi % m_cols);

                    coordinates[0] = (-1.0f) + (m_colDelta * (incol + 0.5f));
                    coordinates[1] = (-1.0f) + (m_rowDelta * (inrow + 0.5f));
                    coordinates[4] = (-1.0f) + (m_colDelta * (outcol + 0.5f));
                    coordinates[5] = (-1.0f) + (m_rowDelta * (outrow + 0.5f));

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++,
                                                           (uint)(inputsStart + inpi), (uint)(outputsStart + outi), weight));
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_rows * m_cols, m_rows * m_cols));
        }
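The row/column arithmetic above places each grid neuron at the centre of its cell in [-1, 1] x [-1, 1]. A minimal sketch of that mapping; the delta definitions (2 / cols and 2 / rows) are an assumption here, since m_colDelta and m_rowDelta are initialised outside this excerpt:

        static class GridCoordinates
        {
            // Maps a flat index on a rows x cols grid to the centre of its cell in [-1, 1] x [-1, 1].
            public static (float X, float Y) CellCentre(int index, int rows, int cols)
            {
                int row = index / cols;
                int col = index % cols;
                float colDelta = 2.0f / cols;   // assumed definition of m_colDelta
                float rowDelta = 2.0f / rows;   // assumed definition of m_rowDelta
                float x = -1.0f + colDelta * (col + 0.5f);
                float y = -1.0f + rowDelta * (row + 0.5f);
                return (x, y);
            }
        }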
Example #45
		public void FixConnectionWeights()
		{
			int bound = connectionGeneList.Count;
			for(int i=0; i<bound; i++)
				connectionGeneList[i].FixedWeight = true;
				
			// This will now be out of date, although it should not be needed after calling FixConnectionWeights.
			mutableConnectionGeneList=null;
		}
Example #46
        /// <summary>
        /// Creates an array of FastConnection(s) representing the connectivity
        /// of the provided NeatGenome.
        /// </summary>
        private static void CreateFastConnectionArrays()
        {
            ConnectionGeneList connectionList = genome.ConnectionGeneList;
            INodeList          nodeList       = genome.NodeList;

            phenomeVariables.nonProtectedCount = 0;

            // Creates local lists to store connections so we can place elements
            // in the correct order, and also to sort them by fragments.
            List <ConnectionGene> toLocalInputList = new List <ConnectionGene>();
            List <ConnectionGene> inToRegList      = new List <ConnectionGene>();
            List <ConnectionGene> nonProtectedList = new List <ConnectionGene>();

            // PROTECTED connections from local output neurons will be treated in a
            // special way (because they are affected by regulatory neurons).
            // For this reason they come in 2D-arrays, organized by module.
            // The number of such connections in each module is also given in
            // a separate 1D-array.
            // Non-protected connections from local output neurons (recursive connections)
            // are treated normally, and are not affected by regulatory neurons.
            // Remember: module Ids are not (necessarily) in order, so we use
            // moduleIdToIndex
            phenomeVariables.localOutToRegOrLInConnect = new FastConnection[genome.Regulatory][];
            phenomeVariables.localOutToOutConnect      = new FastConnection[genome.Regulatory][];
            phenomeVariables.lOutToRegOrLInModuleCount = new int[genome.Regulatory];
            phenomeVariables.localOutToOutModuleCount  = new int[genome.Regulatory];
            // And we set all counts to 0 (because we will count with ++)
            Array.Clear(phenomeVariables.lOutToRegOrLInModuleCount, 0, genome.Regulatory);
            Array.Clear(phenomeVariables.localOutToOutModuleCount, 0, genome.Regulatory);
            // We count first how many to expect in each module, so we need
            // local lists.
            List <ConnectionGene> localOutToOutList          = new List <ConnectionGene>();
            List <ConnectionGene> localOutToRegOrLocalInList = new List <ConnectionGene>();

            // Loop over the connections and look up the neuron IDs for each
            // connection's end points using idToOldIndex.
            foreach (ConnectionGene connection in connectionList)
            {
                // We have to determine the type of connection:
                //          -Input (and bias) to local input
                //          -Non-protected
                //          -Local output to regulatory or local input
                //          -Local output to output

                // We are looking for the type of the node whose index
                // corresponds to the Id of the connection's target.

                NodeType targetType =
                    nodeList[idToOldIndex[connection.TargetNodeId]].NodeType;
                if (!connection.Protected)
                {
                    // Count it and store it for later
                    nonProtectedList.Add(connection);
                    ++phenomeVariables.nonProtectedCount;
                }
                else if (targetType == NodeType.Local_Input)
                {
                    NodeType sourceType =
                        nodeList[idToOldIndex[connection.SourceNodeId]].NodeType;
                    if (sourceType == NodeType.Local_Output)
                    {
                        // This is local_out to local_in
                        localOutToRegOrLocalInList.Add(connection);
                        ++phenomeVariables.lOutToRegOrLInModuleCount[
                            moduleIdToIndex[connection.ModuleId]];
                    }
                    else
                    {
                        // This is bias/input to local input
                        toLocalInputList.Add(connection);
                    }
                }
                else if (targetType == NodeType.Regulatory)
                {
                    NodeType sourceType =
                        nodeList[idToOldIndex[connection.SourceNodeId]].NodeType;
                    if (sourceType == NodeType.Local_Output)
                    {
                        // This is local_out-to-regulatory
                        localOutToRegOrLocalInList.Add(connection);
                        ++phenomeVariables.lOutToRegOrLInModuleCount[
                            moduleIdToIndex[connection.ModuleId]];
                    }
                    else
                    {
                        // This is input/bias to regulatory. These connections
                        // go in the main connection array.
                        inToRegList.Add(connection);
                    }
                }
                else if (targetType == NodeType.Output)
                {
                    // This can only be local_out-to-out
                    localOutToOutList.Add(connection);
                    ++phenomeVariables.localOutToOutModuleCount[
                        moduleIdToIndex[connection.ModuleId]];
                }
            }

            // Now we process the local lists to make the definitive arrays
            ProcessLocalLists(toLocalInputList, inToRegList, nonProtectedList,
                              localOutToOutList, localOutToRegOrLocalInList);
        }
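Because module Ids are not guaranteed to be contiguous, the per-module counting above always goes through moduleIdToIndex before incrementing a slot in the count arrays. A reduced sketch of that pattern with plain collections (simplified stand-ins, not the SharpNEAT types):

        using System.Collections.Generic;

        static class ModuleCountExample
        {
            // moduleIds: the (possibly non-contiguous) module Id of each connection.
            // distinctModuleIds: the module Ids in the order their modules are indexed.
            // Returns counts[i] = number of connections belonging to distinctModuleIds[i].
            public static int[] CountPerModule(IEnumerable<int> moduleIds, IReadOnlyList<int> distinctModuleIds)
            {
                var moduleIdToIndex = new Dictionary<int, int>();
                for (int i = 0; i < distinctModuleIds.Count; i++)
                    moduleIdToIndex[distinctModuleIds[i]] = i;

                int[] counts = new int[distinctModuleIds.Count];
                foreach (int id in moduleIds)
                    counts[moduleIdToIndex[id]]++;   // translate the arbitrary Id to a dense index, then count
                return counts;
            }
        }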
Example #47
//		private void Mutate_ConnectionWeights(EvolutionAlgorithm ea)
//		{
//			float pColdGaussian, pMutation;
//			bool bMutateAllMutableConnections=false;
//			ConnectionGeneList tmpConnectionGeneList=null;
//
//			if(connectionGeneList.Count==0)
//				return;
//
//			// n% of the time perform more severe connection weight mutation (cold gaussian).
//			if(RouletteWheel.SingleThrow(0.5))
//			{
//				// TODO: Migrate mutation proportion values to the NeatParameters structure?
//				pMutation = 0.1F;
//				pColdGaussian = 1.0F;
//			}
//			else
//			{
//				pMutation = 0.8F;		// mutate 80% of weights.	
//				pColdGaussian = 0.0F;	// 0% of those are cold resets.
//			}
//
//
//			// Determine what type of mutation scheme to use.
//			if(ea.IsConnectionWeightFixingEnabled)
//			{
//				EnsureMutableConnectionGeneList();
//				if(mutableConnectionGeneList.Count==0)
//					return;
//
//				// Only mutate pMutation connections at most. If mutable connections make up a lesser proportion
//				// of total connections then just mutate all of the mutable connections.
//				float pMutableConnections = (float)mutableConnectionGeneList.Count / (float)connectionGeneList.Count;
//				if(pMutableConnections <= pMutation)
//					bMutateAllMutableConnections=true;
//				else
//					tmpConnectionGeneList = mutableConnectionGeneList;
//			}
//			else
//			{
//				tmpConnectionGeneList = connectionGeneList;
//			}
//
//			NeatParameters np = ea.NeatParameters;
//			if(bMutateAllMutableConnections)
//			{
//				// Mutate all connections in mutableConnectionGeneList.
//				int bound = mutableConnectionGeneList.Count;
//				for(int i=0; i<bound; i++)
//				{
//					ConnectionGene connectionGene = mutableConnectionGeneList[i];
//
//					if(Utilities.NextDouble() < pColdGaussian)
//					{	// Cold Normal dist.
//						connectionGene.Weight = (Utilities.NextDouble()* np.connectionWeightRange) - np.connectionWeightRange/2.0; 
//					}
//					else
//					{	// Normal distribution..
//						connectionGene.Weight = ValueMutation.Mutate(connectionGene.Weight, np.connectionMutationSigma);
//					}
//					// Cap the connection weight. Large connections weights reduce the effectiveness of the search.
//					connectionGene.Weight = Math.Max(connectionGene.Weight, -np.connectionWeightRange/2.0);
//					connectionGene.Weight = Math.Min(connectionGene.Weight, np.connectionWeightRange/2.0);
//				}
//			}
//			else
//			{
//				// Determine how many connections to mutate (minimum of 1)
//				int mutateCount = (int)Math.Ceiling(connectionGeneList.Count * pMutation);
//				for(int i=0; i<mutateCount; i++)
//				{
//					// Pick a connection at random.
//					ConnectionGene connectionGene = tmpConnectionGeneList[(int)(Utilities.NextDouble() * tmpConnectionGeneList.Count)];
//
//					if(Utilities.NextDouble() < pColdGaussian)
//					{	// Cold Normal dist.
//						connectionGene.Weight = (Utilities.NextDouble()*np.connectionWeightRange) - np.connectionWeightRange/2.0; 
//					}
//					else
//					{	// Normal distribution..
//						connectionGene.Weight = ValueMutation.Mutate(connectionGene.Weight, np.connectionMutationSigma);
//					}
//					// Cap the connection weight. Large connections weights reduce the effectiveness of the search.
//					connectionGene.Weight = Math.Max(connectionGene.Weight, - np.connectionWeightRange/2.0);
//					connectionGene.Weight = Math.Min(connectionGene.Weight, np.connectionWeightRange/2.0);
//				}
//			}
//		}

//		private void MutateWeight(ConnectionGene connectionGene, NeatParameters np)
//		{
//			if(Utilities.NextDouble() < 0.2)
//			{
//				connectionGene.Weight = (Utilities.NextDouble()*np.connectionWeightRange) - np.connectionWeightRange/2.0; 
//			}
//			else
//			{
//				connectionGene.Weight += (Utilities.NextDouble()*2-1.0) * 0.1;
//
//				// Cap the connection weight. Large connections weights reduce the effectiveness of the search.
//				connectionGene.Weight = Math.Max(connectionGene.Weight, - np.connectionWeightRange/2.0);
//				connectionGene.Weight = Math.Min(connectionGene.Weight, np.connectionWeightRange/2.0);
//			}
//		}
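The commented-out mutation routines above alternate between a "cold" reset drawn uniformly from the full weight range and a small perturbation, and always cap the result at +/- connectionWeightRange / 2. A self-contained sketch of that scheme; System.Random stands in for the library's Utilities/ValueMutation helpers, and the 0.2 reset probability and 0.1 step size are taken from the commented MutateWeight above:

        using System;

        static class WeightMutationExample
        {
            private static readonly Random Rng = new Random();

            public static double MutateWeight(double weight, double connectionWeightRange)
            {
                double half = connectionWeightRange / 2.0;
                if (Rng.NextDouble() < 0.2)
                {
                    // Cold reset: replace the weight with a uniform draw over the full range.
                    weight = (Rng.NextDouble() * connectionWeightRange) - half;
                }
                else
                {
                    // Small perturbation (the library delegates this to ValueMutation.Mutate).
                    weight += (Rng.NextDouble() * 2.0 - 1.0) * 0.1;
                }
                // Cap the weight; large weights reduce the effectiveness of the search.
                return Math.Max(-half, Math.Min(half, weight));
            }
        }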

		/// <summary>
		/// Correlate the ConnectionGenes within the two ConnectionGeneLists - based upon innovation number.
		/// Return an ArrayList of ConnectionGene[2] structures - pairs of matching ConnectionGenes.
		/// </summary>
		/// <param name="list1"></param>
		/// <param name="list2"></param>
		/// <returns></returns>
		private CorrelationResults CorrelateConnectionGeneLists(ConnectionGeneList list1, ConnectionGeneList list2)
		{
			CorrelationResults correlationResults = new CorrelationResults();

		//----- Test for special cases.
			if(list1.Count==0 && list2.Count==0)
			{	// Both lists are empty!
				return correlationResults;
			}

			if(list1.Count==0)
			{	// All list2 genes are excess.
				correlationResults.CorrelationStatistics.ExcessConnectionGeneCount = list2.Count;
				foreach(ConnectionGene connectionGene in list2)
					correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, null, connectionGene));

				return correlationResults;
			}

			if(list2.Count==0)
			{	// All list1 genes are excess.
				correlationResults.CorrelationStatistics.ExcessConnectionGeneCount = list1.Count;
				foreach(ConnectionGene connectionGene in list1)
					correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, null, connectionGene));

				return correlationResults;
			}

		//----- Both ConnectionGeneLists contain genes - compare the contents.
			int list1Idx=0;
			int list2Idx=0;
			ConnectionGene connectionGene1 = list1[list1Idx];
			ConnectionGene connectionGene2 = list2[list2Idx];
			for(;;)
			{
				if(connectionGene2.InnovationId < connectionGene1.InnovationId)
				{	
					// connectionGene2 is disjoint.
					correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.DisjointConnectionGene, null, connectionGene2));
					correlationResults.CorrelationStatistics.DisjointConnectionGeneCount++;

					// Move to the next gene in list2.
					list2Idx++;
				}
				else if(connectionGene1.InnovationId == connectionGene2.InnovationId)
				{
					correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.MatchedConnectionGenes, connectionGene1, connectionGene2));
					correlationResults.CorrelationStatistics.ConnectionWeightDelta += Math.Abs(connectionGene1.Weight-connectionGene2.Weight);
					correlationResults.CorrelationStatistics.MatchingGeneCount++;

					// Move to the next gene in both lists.
					list1Idx++;
					list2Idx++;
				}
				else // (connectionGene2.InnovationId > connectionGene1.InnovationId)
				{	
					// connectionGene1 is disjoint.
					correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.DisjointConnectionGene, connectionGene1, null));
					correlationResults.CorrelationStatistics.DisjointConnectionGeneCount++;

					// Move to the next gene in list1.
					list1Idx++;
				}
				
				// Check if we have reached the end of one (or both) of the lists. If we have reached the end of both then 
				// we execute the first if block - but it doesn't matter since the loop is not entered if both lists have 
				// been exhausted.
				if(list1Idx >= list1.Count)
				{	
					// All remaining list2 genes are excess.
					for(; list2Idx<list2.Count; list2Idx++)
					{
						correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, null, list2[list2Idx]));
						correlationResults.CorrelationStatistics.ExcessConnectionGeneCount++;
					}
					return correlationResults;
				}

				if(list2Idx >= list2.Count)
				{
					// All remaining list1 genes are excess.
					for(; list1Idx<list1.Count; list1Idx++)
					{
						correlationResults.CorrelationItemList.Add(new CorrelationItem(CorrelationItemType.ExcessConnectionGene, list1[list1Idx], null));
						correlationResults.CorrelationStatistics.ExcessConnectionGeneCount++;
					}
					return correlationResults;
				}

				connectionGene1 = list1[list1Idx];
				connectionGene2 = list2[list2Idx];
			}
		}
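The correlation statistics collected above (excess count, disjoint count, summed weight difference over matched genes) are the ingredients of NEAT's compatibility distance. A hedged sketch of that use; the coefficients and the normalisation choice are parameters here because the excerpt does not show the values this code base uses:

        using System;

        static class CompatibilityExample
        {
            // Standard NEAT-style compatibility distance:
            //   d = c1 * excess / N + c2 * disjoint / N + c3 * meanWeightDelta,
            // where N is typically the size of the larger genome (clamped to at least 1).
            public static double Distance(int excessCount, int disjointCount, double weightDeltaSum,
                                          int matchingCount, int largerGenomeSize,
                                          double c1, double c2, double c3)
            {
                double n = Math.Max(1, largerGenomeSize);
                double meanWeightDelta = matchingCount > 0 ? weightDeltaSum / matchingCount : 0.0;
                return (c1 * excessCount) / n + (c2 * disjointCount) / n + c3 * meanWeightDelta;
            }
        }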
        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateMultiGenomeStackES(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }


            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;


            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);

            uint hiddenCount = 0;

            foreach (float stackCoordinate in stackCoordinates)
            {
                List<PointF> hiddenNeuronPositions = new List<PointF>();
                ConnectionGeneList con = new ConnectionGeneList();
                SubstrateEvolution se = new SubstrateEvolution();
                se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                    SubstrateEvolution.SAMPLE_WIDTH,
                    SubstrateEvolution.SAMPLE_TRESHOLD,
                    SubstrateEvolution.NEIGHBOR_LEVEL,
                    SubstrateEvolution.INCREASE_RESSOLUTION_THRESHOLD,
                    SubstrateEvolution.MIN_DISTANCE,
                    SubstrateEvolution.CONNECTION_TRESHOLD, //0.4. ConnectionThreshold
                    InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref con, ref hiddenNeuronPositions, stackCoordinate);

                // set up the hidden nodes
                for (uint a = 0; a < hiddenNeuronPositions.Count; a++)
                {
                    neurons.Add(new NeuronGene(hiddenCount + a + totalInputCount + totalOutputCount, NeuronType.Hidden, activationFunction));
                }



                foreach (ConnectionGene c in con)
                {
                    if (c.SourceNeuronId < InputCount)
                    {
                        c.SourceNeuronId += agent * InputCount;
                    }
                    else if (c.SourceNeuronId < InputCount + OutputCount)
                    {
                        c.SourceNeuronId = (c.SourceNeuronId - InputCount) + totalInputCount + agent * OutputCount;
                    }
                    else
                    {
                        c.SourceNeuronId = (uint)((c.SourceNeuronId - InputCount - OutputCount) + totalInputCount + totalOutputCount + hiddenCount);
                    }

                    if (c.TargetNeuronId < InputCount)
                    {
                        c.TargetNeuronId += agent * InputCount;
                    }
                    else if (c.TargetNeuronId < InputCount + OutputCount)
                    {
                        c.TargetNeuronId = (c.TargetNeuronId - InputCount) + totalInputCount + agent * OutputCount;
                    }
                    else
                    {
                        c.TargetNeuronId = (uint)((c.TargetNeuronId - InputCount - OutputCount) + totalInputCount + totalOutputCount + hiddenCount);
                    }

                    connections.Add(new ConnectionGene(connectionCounter++, c.SourceNeuronId, c.TargetNeuronId, c.Weight, ref c.coordinates));

                }
                hiddenCount += (uint)hiddenNeuronPositions.Count;
                agent++;

            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
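        // The foreach loop above shifts each agent's local neuron IDs into the stacked layout
        // input | output | hidden used by the combined genome. A minimal sketch of that remapping
        // as a pure helper (the method name and parameters are illustrative, not SharpNEAT API):
        private static uint RemapStackedNeuronId(uint localId, uint agent, uint inputCount, uint outputCount,
                                                 uint totalInputCount, uint totalOutputCount, uint hiddenOffset)
        {
            if (localId < inputCount)
                return localId + agent * inputCount;                                   // agent's slice of the input block
            if (localId < inputCount + outputCount)
                return (localId - inputCount) + totalInputCount + agent * outputCount; // agent's slice of the output block
            // hidden nodes land after all inputs and outputs, offset by hidden nodes added for earlier agents
            return (localId - inputCount - outputCount) + totalInputCount + totalOutputCount + hiddenOffset;
        }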
Beispiel #49
		public override IGenome CreateOffspring_Sexual(EvolutionAlgorithm ea, IGenome parent)
		{
            NeatGenome otherParent = parent as NeatGenome;
            if (otherParent == null)
                return null;
            
            // Build a list of connections in either this genome or the other parent.
			CorrelationResults correlationResults = CorrelateConnectionGeneLists(connectionGeneList, otherParent.connectionGeneList);			
			Debug.Assert(correlationResults.PerformIntegrityCheck(), "CorrelationResults failed integrity check.");

			//----- Connection Genes.
			// We temporarily store the offspring's genes in newConnectionGeneList and keep track of which genes
			// exist with newConnectionGeneTable. Here we ensure these objects are created, and if they already existed
			// then ensure they are cleared. Clearing existing objects is more efficient than creating new ones because
			// the allocated memory can be re-used.

            // Key = connection key, value = index in newConnectionGeneList.
			if(newConnectionGeneTable==null)
			{	// Provide a capacity figure to the new Hashtable. The offspring will be the same length (or thereabouts).
				newConnectionGeneTable = new Hashtable(connectionGeneList.Count);
			}
			else
			{
				newConnectionGeneTable.Clear();
			}
			//TODO: No 'capacity' constructor on CollectionBase. Create modified/custom CollectionBase.
			// newConnectionGeneList must be constructed on each call because it is passed to a new NeatGenome 
			// at construction time and a permanent reference to the list is kept.
            newConnectionGeneList = new ConnectionGeneList(ConnectionGeneList.Count);

			// A switch that stores which parent is fittest: 1 or 2. Chosen randomly if both are equally fit. It is more efficient to calculate this just once.
			byte fitSwitch;
			if(Fitness > otherParent.Fitness)
				fitSwitch = 1;
			else if(Fitness < otherParent.Fitness)
				fitSwitch = 2;
			else
			{	// Select one of the parents at random to be the 'master' genome during crossover.
				if(Utilities.NextDouble() < 0.5)
					fitSwitch = 1;
				else
					fitSwitch = 2;
			}

			bool combineDisjointExcessFlag = Utilities.NextDouble() < ea.NeatParameters.pDisjointExcessGenesRecombined;

			// Loop through the correlationResults, building a table of ConnectionGenes from the parents that will make it into our 
			// new [single] offspring. We use a table keyed on connection end points to prevent passing connections to the offspring 
			// that may have the same end points but a different innovation number - effectively we filter out duplicate connections.
			int idxBound = correlationResults.CorrelationItemList.Count;
			for(int i=0; i<idxBound; i++)
			{
				CreateOffspring_Sexual_ProcessCorrelationItem((CorrelationItem)correlationResults.CorrelationItemList[i], fitSwitch, combineDisjointExcessFlag, ea.NeatParameters);
			}

			//----- Neuron Genes.
			// Build a neuronGeneList by analysing each connection's neuron end-point IDs.
			// This strategy has the benefit of eliminating neurons that are no longer connected to anything.
			// Remember to always keep all input, output and bias neurons though!
            NeuronGeneList newNeuronGeneList = new NeuronGeneList(neuronGeneList.Count);

			// Keep a table of NeuronGene innovation IDs so that we can keep track of which ones have already been added.
            // Key = innovation ID, value = null (the table is used only as a set of keys).
			if(newNeuronGeneTable==null)
				newNeuronGeneTable = new Hashtable(neuronGeneList.Count);
			else
				newNeuronGeneTable.Clear();

			// Get the input/output neurons from this parent. All genomes share these neurons; they do not change during a run.
			idxBound = neuronGeneList.Count;
			for(int i=0; i<idxBound; i++)
			{
				if(neuronGeneList[i].NeuronType != NeuronType.Hidden)
				{
					newNeuronGeneList.Add(new NeuronGene(neuronGeneList[i]));
					newNeuronGeneTable.Add(neuronGeneList[i].InnovationId, null);
				}
				else
				{	// No more bias, input or output nodes; break out of the loop.
					break;
				}
			}

			// Now analyse the connections to determine which NeuronGenes are required in the offspring.
            // Loop through every connection in the child, and add to the child those hidden neurons that are sources or targets of the connection.
			idxBound = newConnectionGeneList.Count;
			for(int i=0; i<idxBound; i++)
			{
                NeuronGene neuronGene;
				ConnectionGene connectionGene = newConnectionGeneList[i];
				if(!newNeuronGeneTable.ContainsKey(connectionGene.SourceNeuronId))
				{	
                    //TODO: DAVID proper activation function
					// We can safely assume that any missing NeuronGenes at this point are hidden neurons.
                    neuronGene = this.neuronGeneList.GetNeuronById(connectionGene.SourceNeuronId);
                    if (neuronGene != null)
                        newNeuronGeneList.Add(new NeuronGene(neuronGene));
                    else
                        newNeuronGeneList.Add(new NeuronGene(otherParent.NeuronGeneList.GetNeuronById(connectionGene.SourceNeuronId)));
                    //newNeuronGeneList.Add(new NeuronGene(connectionGene.SourceNeuronId, NeuronType.Hidden, ActivationFunctionFactory.GetActivationFunction("SteepenedSigmoid")));
					newNeuronGeneTable.Add(connectionGene.SourceNeuronId, null);
				}

				if(!newNeuronGeneTable.ContainsKey(connectionGene.TargetNeuronId))
				{	
                    //TODO: DAVID proper activation function
					// We can safely assume that any missing NeuronGenes at this point are hidden neurons.
                    neuronGene = this.neuronGeneList.GetNeuronById(connectionGene.TargetNeuronId);
                    if (neuronGene != null)
                        newNeuronGeneList.Add(new NeuronGene(neuronGene));
                    else
                        newNeuronGeneList.Add(new NeuronGene(otherParent.NeuronGeneList.GetNeuronById(connectionGene.TargetNeuronId)));
                    //newNeuronGeneList.Add(new NeuronGene(connectionGene.TargetNeuronId, NeuronType.Hidden, ActivationFunctionFactory.GetActivationFunction("SteepenedSigmoid")));
					newNeuronGeneTable.Add(connectionGene.TargetNeuronId, null);
				}
			}

            // Determine which modules to pass on to the child in the same way.
            // For each module in this genome or in the other parent, if it was referenced by even one connection add it and all its dummy neurons to the child.
            List<ModuleGene> newModuleGeneList = new List<ModuleGene>();

            // Build a list of modules the child might have: the union of the parents' module lists. The genes are copies, so duplicates are detected by innovation ID rather than by reference.
            List<ModuleGene> unionParentModules = new List<ModuleGene>(moduleGeneList);
            foreach (ModuleGene moduleGene in otherParent.moduleGeneList) {
                bool alreadySeen = false;
                foreach (ModuleGene match in unionParentModules) {
                    if (moduleGene.InnovationId == match.InnovationId) {
                        alreadySeen = true;
                        break;
                    }
                }
                if (!alreadySeen) {
                    unionParentModules.Add(moduleGene);
                }
            }

            foreach (ModuleGene moduleGene in unionParentModules) {
                // Examine each neuron in the child to determine whether it is part of a module.
                foreach (List<uint> dummyNeuronList in new List<uint>[] { moduleGene.InputIds, moduleGene.OutputIds }) {
                    foreach (uint dummyNeuronId in dummyNeuronList) {
                        if (newNeuronGeneTable.ContainsKey(dummyNeuronId)) {
                            goto childHasModule;
                        }
                    }
                }

                continue; // the child does not contain this module, so continue the loop and check for the next module.
            childHasModule: // the child does contain this module, so make sure the child gets all the nodes the module requires to work.

                // Make sure the child has all the neurons in the given module.
                newModuleGeneList.Add(new ModuleGene(moduleGene));
                foreach (List<uint> dummyNeuronList in new List<uint>[] { moduleGene.InputIds, moduleGene.OutputIds }) {
                    foreach (uint dummyNeuronId in dummyNeuronList) {
                        if (!newNeuronGeneTable.ContainsKey(dummyNeuronId)) {
                            newNeuronGeneTable.Add(dummyNeuronId, null);
                            NeuronGene neuronGene = this.neuronGeneList.GetNeuronById(dummyNeuronId);
                            if (neuronGene != null) {
                                newNeuronGeneList.Add(new NeuronGene(neuronGene));
                            } else {
                                newNeuronGeneList.Add(new NeuronGene(otherParent.NeuronGeneList.GetNeuronById(dummyNeuronId)));
                            }
                        }
                    }
                }
            }

			// TODO: Inefficient code?
			newNeuronGeneList.SortByInnovationId();
            // Schrum: Need to calculate this because of Module Mutation adding extra outputs
            int revisedOutputCount = 0;
            foreach(NeuronGene n in newNeuronGeneList) {
                if (n.NeuronType == NeuronType.Output) 
                    revisedOutputCount++;
            }

			// newConnectionGeneList is already sorted because it was generated by passing over the list returned by
			// CorrelateConnectionGeneLists() - which is always in order.

            // Schrum: Modified to add outputsPerPolicy as a parameter, and use revisedOutputCount
			return new NeatGenome(ea.NextGenomeId, newNeuronGeneList, newModuleGeneList, newConnectionGeneList, inputNeuronCount, revisedOutputCount, outputsPerPolicy);
		}
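        // CreateOffspring_Sexual filters out child connections that share end points but carry
        // different innovation IDs by keying newConnectionGeneTable on the connection's end points.
        // A minimal sketch of that duplicate filter using a dictionary keyed on a packed
        // (source, target) pair (names are illustrative, not the library's own table):
        private static bool TryRegisterEndpoints(Dictionary<ulong, int> endpointTable,
                                                 uint sourceId, uint targetId, int offspringListIndex)
        {
            ulong key = ((ulong)sourceId << 32) | targetId; // pack both end points into a single 64-bit key
            if (endpointTable.ContainsKey(key))
                return false;                               // a gene with these end points is already in the offspring
            endpointTable.Add(key, offspringListIndex);     // remember where the accepted gene sits in the child list
            return true;
        }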
        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            //CHECK TO SEE IF HIDDEN NEURONS ARE OUTPUT NEURONS
            List<PointF> hiddenNeuronPositions = new List<PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList();//(int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);


            SubstrateEvolution se = new SubstrateEvolution();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                SubstrateEvolution.SAMPLE_WIDTH,
                SubstrateEvolution.SAMPLE_TRESHOLD,
                SubstrateEvolution.NEIGHBOR_LEVEL,
                SubstrateEvolution.INCREASE_RESSOLUTION_THRESHOLD,
                SubstrateEvolution.MIN_DISTANCE,
                SubstrateEvolution.CONNECTION_TRESHOLD, //0.4. ConnectionThreshold
                InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            float[] coordinates = new float[5];
            uint connectionCounter = (uint)connections.Count;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
                
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }


            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;
            uint c1, c2;

            float delta = 0.15f;//2.0f / InputCount;
            float minDistance, dist, sourceX = -1, sourceY = -1, targetX = -1, targetY = -1;
            uint closestNodeIndex;
            //   int index, hiddenCount;

            //Connections to input nodes    
            // hiddenCount = 0;


            double tolerance = 0.1;
            //bool[] taken = new bool[hiddenNeuronGroup.NeuronPositions.Count];
            closestNodeIndex = 0;
            int ccc;

            //CONNECT FROM INPUT NODES
            // ConnectionGeneList addConnections = new ConnectionGeneList();
            targetID = 0;
            bool[] visited = new bool[neurons.Count];
            List<uint> nodeList = new List<uint>();
            bool[] connectedToInput = new bool[neurons.Count];

            //From hidden to output
            //taken = new bool[hiddenNeuronGroup.NeuronPositions.Count];
            // float targetX=-1.0f, targetY=-1.0f;
            targetID = 0;
            //  bool outputConnectedToInput;

            bool[] isOutput = new bool[neurons.Count];
            //float output, weight;
            //bool[] connectedToInput = new bool[neurons.Count];

            //bool connectToHidden;

            float totalConnectionDist = 0.0f;
            //Add connections between Hidden Neurons
            // addConnections.AddRange(connections);

            bool danglingConnection = true;

            while (danglingConnection)
            {
                bool[] hasIncomming = new bool[neurons.Count];

                foreach (ConnectionGene co in connections)
                {
                    //  if (co.SourceNeuronId != co.TargetNeuronId)
                    // {
                    hasIncomming[co.TargetNeuronId] = true;
                    // }
                }
                for (int i = 0; i < InputCount; i++)
                    hasIncomming[i] = true;

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    //  if (co.TargetNeuronId != co.SourceNeuronId)
                    //  {
                    if (co.TargetNeuronId != co.SourceNeuronId)  // neurons that only connect to themselves don't count
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                    //  }
                }

                //Keep  output neurons
                for (int i = 0; i < OutputCount; i++)
                    hasOutgoing[i + InputCount] = true;


                danglingConnection = false;
                //Check if there are still dangling connections
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncomming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return (!hasIncomming[m.SourceNeuronId]); });
                connections.RemoveAll(delegate(ConnectionGene m) { return (!hasOutgoing[m.TargetNeuronId]); });
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));
        //     SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
       
            return gn;
        }
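        // The while-loop above keeps removing "dangling" connections: ones whose source never
        // receives input or whose target never feeds anything, with input neurons treated as
        // always fed and output neurons as always read. A standalone sketch of the same pruning
        // on plain (source, target) index pairs (not the ConnectionGeneList type):
        private static List<Tuple<int, int>> PruneDanglingConnections(
            List<Tuple<int, int>> edges, int neuronCount, int inputCount, int outputCount)
        {
            bool removedAny = true;
            while (removedAny)
            {
                bool[] hasIncoming = new bool[neuronCount];
                bool[] hasOutgoing = new bool[neuronCount];
                for (int i = 0; i < inputCount; i++) hasIncoming[i] = true;               // inputs are always fed
                for (int i = 0; i < outputCount; i++) hasOutgoing[i + inputCount] = true; // outputs are always read
                foreach (var e in edges)
                {
                    hasIncoming[e.Item2] = true;
                    if (e.Item1 != e.Item2) hasOutgoing[e.Item1] = true;                  // pure self-loops do not count
                }
                int countBefore = edges.Count;
                edges.RemoveAll(e => !hasIncoming[e.Item1] || !hasOutgoing[e.Item2]);
                removedAny = edges.Count != countBefore;
            }
            return edges;
        }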
        public override NeatGenome.NeatGenome generateGenome(INetwork network)
        {
#if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
#endif
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
            float[]            coordinates = new float[4];
            float output;
            uint  connectionCounter = 0;
            int   iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;
            coordinates[1] = -1;
            coordinates[2] = -1 + hiddenDelta / 2.0f;
            coordinates[3] = 0;

            for (uint source = 0; source < inputCount; source++, coordinates[0] += inputDelta)
            {
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                for (uint target = 0; target < hiddenCount; target++, coordinates[2] += hiddenDelta)
                {
                    // Since there are an equal number of input and hidden nodes, we check these every time
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    output = network.GetOutputSignal(0);
#if OUTPUT
                    foreach (double d in coordinates)
                    {
                        sw.Write(d + " ");
                    }
                    sw.Write(output);
                    sw.WriteLine();
#endif
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, source, target + inputCount + outputCount, weight));
                    }

                    //Since every other hidden node has a corresponding output node, we check every other time
                    if (target % 2 == 0)
                    {
                        network.ClearSignals();
                        coordinates[1] = 0;
                        coordinates[3] = 1;
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);
#if OUTPUT
                        foreach (double d in coordinates)
                        {
                            sw.Write(d + " ");
                        }
                        sw.Write(output);
                        sw.WriteLine();
#endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, (target / 2) + inputCount, weight));
                        }
                        coordinates[1] = -1;
                        coordinates[3] = 0;
                    }
                }
            }
#if OUTPUT
            sw.Flush();
#endif
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
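        // Both substrate generators above express a connection only when the CPPN output's
        // magnitude exceeds the threshold, then rescale the remainder into [-weightRange, weightRange]:
        // weight = ((|o| - t) / (1 - t)) * weightRange * sign(o). A minimal sketch of that mapping
        // (threshold and weightRange are parameters here, not library constants):
        private static bool TryExpressWeight(float output, float threshold, float weightRange, out float weight)
        {
            weight = 0.0f;
            if (Math.Abs(output) <= threshold)
                return false; // below the expression threshold: no connection is created
            weight = (float)(((Math.Abs(output) - threshold) / (1.0f - threshold)) * weightRange * Math.Sign(output));
            return true;
        }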
        public void generateConnections(List <PointF> inputNeuronPositions, List <PointF> outputNeuronPositions,
                                        INetwork genome, float sampleWidth, float sampleThreshold, float neighborLevel,
                                        float increaseResolutionThreshold, float minDistance,
                                        float connectionThreshold,
                                        uint inputCount, uint outputCount,
                                        float minX, float minY, float maxX, float maxY,
                                        ref ConnectionGeneList connections, ref List <PointF> hiddenNeurons, float stackNumber)
        {
            // SubstrateEvolution.stackNumber = stackNumber; //TODO hacky
            //  totalLength = 0.0f;
            List <Rect> rectList = new List <Rect>();

            //minX = -1.0f;
            //minY = -1.0f;
            //maxX = 1.0f;
            //maxY = 1.0f;

            List <ExpressPoint> _connections = new List <ExpressPoint>();

            List <PointF> hiddenPos = new List <PointF>();

            List <PointF> pointToAdd = new List <PointF>();
            float         targetX, targetY;


            double output;
            int    sourceIndex, targetIndex, neuronCount = 0;
            uint   counter = 0;
            float  weight;

            float[] connectionCoordinates = new float[5];

            connectionCoordinates[4] = stackNumber;

            //CONNECTION DIRECTLY FROM INPUT NODES
            List <PointF> tabuList = new List <PointF>();

            foreach (PointF input in inputNeuronPositions)
            {
                Rect startRec = new Rect(input.X, input.Y, true, null, 0, minX, minY, maxX, maxY, genome, stackNumber);
                startRec.createTree(sampleWidth);
                startRec.addPoints(ref _connections, ref sampleThreshold, ref neighborLevel, ref increaseResolutionThreshold, ref minDistance);

                foreach (ExpressPoint p in _connections)
                {
                    targetX = (p.x1 + p.x2) / 2.0f;
                    targetY = (p.y1 + p.y2) / 2.0f;

                    PointF newp = new PointF(targetX, targetY);
                    if (!hiddenPos.Contains(newp))
                    {
                        hiddenPos.Add(newp);
                        tabuList.Add(newp);
                    }
                }
            }
            foreach (ExpressPoint t in _connections)
            {
                connectionCoordinates[0] = t.fixedx;
                connectionCoordinates[1] = t.fixedy;
                connectionCoordinates[2] = (float)(t.x1 + t.x2) / 2.0f;
                connectionCoordinates[3] = (float)(t.y1 + t.y2) / 2.0f;

                //        double dist = Math.Sqrt(Math.Pow(connectionCoordinates[0] - connectionCoordinates[2], 2) +
                //Math.Pow(connectionCoordinates[1] - connectionCoordinates[3], 2));

                if (float.IsNaN(t.activationLevel))
                {
                    Console.WriteLine("Normally this shouldn't happen");
                    return;
                    //
                }
                else
                {
                    output = t.activationLevel;
                }

                //!remove
                //recurrent = (connectionCoordinates[0] == connectionCoordinates[2]) && (connectionCoordinates[1] == connectionCoordinates[3]);
                //
                //   if ((Math.Abs(output) > connectionThreshold)) //&& (pcount % 10 ==0))
                //  {
                PointF source = new PointF(connectionCoordinates[0], connectionCoordinates[1]);
                PointF target = new PointF(connectionCoordinates[2], connectionCoordinates[3]);


                //connectionList.Add(new Connection(x1, y1, x2, y2));

                sourceIndex = inputNeuronPositions.IndexOf(source); //TODO change. computationally expensive
                if (sourceIndex == -1)                              //!hiddenNeurons.Contains(source))
                {
                    Console.WriteLine("This shouldn't happen.");
                    sourceIndex = inputNeuronPositions.Count;
                    // hiddenNeurons.Add(source);
                    //  neuronCount++;
                }

                targetIndex = hiddenNeurons.IndexOf(target);
                if (targetIndex == -1) //!hiddenNeurons.Contains(target)) hiddenNeurons.Add(target);
                {
                    targetIndex = hiddenNeurons.Count;
                    hiddenNeurons.Add(target);
                    neuronCount++;
                }

                weight = (float)(((Math.Abs(output) - (connectionThreshold)) / (1 - connectionThreshold)) * WEIGHT_RANGE * Math.Sign(output));
                //if (weight > 0.0) weight = 1.0f;
                //else weight = -1.0f;

                connections.Add(new ConnectionGene(counter++, (uint)(sourceIndex), (uint)(targetIndex + inputCount + outputCount), weight, ref connectionCoordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                //   }
            }
            //return;//! remove
            _connections.Clear();
            //**************************************
            //Hidden to Hidden
            for (int step = 0; step < MAX_ITER_STEPS; step++)
            {
                pointToAdd.Clear();
                foreach (PointF hiddenP in hiddenPos)
                {
                    Rect startRec = new Rect(hiddenP.X, hiddenP.Y, true, null, 0, minX, minY, maxX, maxY, genome, stackNumber);
                    startRec.createTree(sampleWidth);
                    startRec.addPoints(ref _connections, ref sampleThreshold, ref neighborLevel, ref increaseResolutionThreshold, ref minDistance);

                    foreach (ExpressPoint p in _connections)
                    {
                        //     double dist = Math.Sqrt(Math.Pow(p.x1 - p.x2, 2) + Math.Pow(p.y1 - p.y2, 2));

                        // connectionThreshold*

                        //  if (Math.Abs(p.activationLevel) > connectionThreshold)//(dist / 2.5f + 0.5))
                        //  {
                        targetX = (p.x1 + p.x2) / 2.0f;
                        targetY = (p.y1 + p.y2) / 2.0f;
                        PointF newp = new PointF(targetX, targetY);
                        if (!tabuList.Contains(newp))
                        {
                            pointToAdd.Add(newp);
                            tabuList.Add(newp);
                        }
                        //  }
                        //  if (targetY>input.Y)
                        //
                    }
                }
                hiddenPos.Clear();
                if (pointToAdd.Count == 0)
                {
                    break;
                }
                hiddenPos.AddRange(pointToAdd);
            }


            foreach (ExpressPoint t in _connections)
            {
                connectionCoordinates[0] = t.fixedx;
                connectionCoordinates[1] = t.fixedy;
                connectionCoordinates[2] = (float)(t.x1 + t.x2) / 2.0f;
                connectionCoordinates[3] = (float)(t.y1 + t.y2) / 2.0f;

                //         double dist = Math.Sqrt(Math.Pow(connectionCoordinates[0] - connectionCoordinates[2], 2) +
                // Math.Pow(connectionCoordinates[1] - connectionCoordinates[3], 2));

                if (float.IsNaN(t.activationLevel))
                {
                    Console.WriteLine("Normally this shouldn't happen");
                    return;
                    //
                }
                else
                {
                    output = t.activationLevel;
                }

                //  if ((Math.Abs(output) > connectionThreshold)) //&& (pcount % 10 ==0))
                // {
                PointF source = new PointF(connectionCoordinates[0], connectionCoordinates[1]);
                PointF target = new PointF(connectionCoordinates[2], connectionCoordinates[3]);
                //connectionList.Add(new Connection(x1, y1, x2, y2));

                sourceIndex = hiddenNeurons.IndexOf(source); //TODO change. computationally expensive
                if (sourceIndex == -1)                       //!hiddenNeurons.Contains(source))
                {
                    sourceIndex = hiddenNeurons.Count;
                    hiddenNeurons.Add(source);
                    neuronCount++;
                }

                targetIndex = hiddenNeurons.IndexOf(target);
                if (targetIndex == -1) //!hiddenNeurons.Contains(target)) hiddenNeurons.Add(target);
                {
                    targetIndex = hiddenNeurons.Count;
                    hiddenNeurons.Add(target);
                    neuronCount++;
                }

                weight = (float)(((Math.Abs(output) - (connectionThreshold)) / (1 - connectionThreshold)) * WEIGHT_RANGE * Math.Sign(output));
                //if (weight > 0.0) weight = 1.0f;
                //else weight = -1.0f;

                connections.Add(new ConnectionGene(counter++, (uint)(sourceIndex + inputCount + outputCount), (uint)(targetIndex + inputCount + outputCount), weight, ref connectionCoordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                //   }
            }

            _connections.Clear();

            //CONNECT TO OUTPUT
            foreach (PointF outputPos in outputNeuronPositions)
            {
                Rect startRec = new Rect(outputPos.X, outputPos.Y, false, null, 0, minX, minY, maxX, maxY, genome, stackNumber);
                startRec.createTree(sampleWidth);
                startRec.addPoints(ref _connections, ref sampleThreshold, ref neighborLevel, ref increaseResolutionThreshold, ref minDistance);

                //foreach (ExpressPoint p in _connections)
                //{
                //    targetX = (p.x1 + p.x2) / 2.0f;
                //    targetY = (p.y1 + p.y2) / 2.0f;
                //    //  if (targetY>input.Y)
                //    inputPos.Add(new PointF(targetX, targetY));
                //}
            }
            //GO DEEPER
            foreach (ExpressPoint t in _connections)
            {
                connectionCoordinates[0] = (float)(t.x1 + t.x2) / 2.0f;
                connectionCoordinates[1] = (float)(t.y1 + t.y2) / 2.0f;
                connectionCoordinates[2] = t.fixedx;
                connectionCoordinates[3] = t.fixedy;

                //          double dist = Math.Sqrt(Math.Pow(connectionCoordinates[0] - connectionCoordinates[2], 2) +
                // Math.Pow(connectionCoordinates[1] - connectionCoordinates[3], 2));

                if (float.IsNaN(t.activationLevel))
                {
                    Console.WriteLine("Normally this shouldn't happen");
                    return;
                    //
                }
                else
                {
                    output = t.activationLevel;
                }

                //!remove
                //recurrent = (connectionCoordinates[0] == connectionCoordinates[2]) && (connectionCoordinates[1] == connectionCoordinates[3]);
                //
                //    if ((Math.Abs(output) > connectionThreshold)) //&& (pcount % 10 ==0))
                //   {
                PointF source = new PointF(connectionCoordinates[0], connectionCoordinates[1]);
                PointF target = new PointF(connectionCoordinates[2], connectionCoordinates[3]);
                //connectionList.Add(new Connection(x1, y1, x2, y2));

                sourceIndex = hiddenNeurons.IndexOf(source);            //TODO change. computationally expensive

                //only connect if hidden neuron already exists
                if (sourceIndex != -1)
                {
                    if (sourceIndex == -1) //!hiddenNeurons.Contains(source))
                    {
                        //IF IT DOES NOT EXIST WE COULD POTENTIALLY STOP HERE
                        sourceIndex = hiddenNeurons.Count;
                        hiddenNeurons.Add(source);
                        neuronCount++;
                    }

                    targetIndex = outputNeuronPositions.IndexOf(target);
                    if (targetIndex == -1) //!hiddenNeurons.Contains(target)) hiddenNeurons.Add(target);
                    {
                        Console.WriteLine("SubstrateEvolution: This shouldn't happen");
                        //targetIndex = hiddenNeurons.Count;
                        //hiddenNeurons.Add(target);
                        //neuronCount++;
                    }

                    weight = (float)(((Math.Abs(output) - (connectionThreshold)) / (1 - connectionThreshold)) * WEIGHT_RANGE * Math.Sign(output));
                    //if (weight > 0.0) weight = 1.0f;
                    //else weight = -1.0f;

                    connections.Add(new ConnectionGene(counter++, (uint)(sourceIndex + inputCount + outputCount), (uint)(targetIndex + inputCount), weight, ref connectionCoordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                }
                // }
            }
        }
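        // generateConnections grows the hidden layer iteratively: every newly discovered hidden
        // position is expanded again on the next pass, a tabu list prevents revisiting positions,
        // and the loop stops after MAX_ITER_STEPS or when a pass adds nothing new. A minimal
        // sketch of that frontier expansion, with a hypothetical expand() delegate standing in
        // for the Rect/ExpressPoint sampling:
        private static List<PointF> ExpandHiddenPositions(List<PointF> seeds,
                                                          Func<PointF, IEnumerable<PointF>> expand, int maxSteps)
        {
            var tabu = new HashSet<PointF>(seeds);
            var frontier = new List<PointF>(seeds);
            var discovered = new List<PointF>(seeds);
            for (int step = 0; step < maxSteps && frontier.Count > 0; step++)
            {
                var next = new List<PointF>();
                foreach (PointF p in frontier)
                    foreach (PointF q in expand(p))
                        if (tabu.Add(q))   // only keep positions we have not seen before
                            next.Add(q);
                discovered.AddRange(next);
                frontier = next;           // expand only the freshly found positions on the next pass
            }
            return discovered;
        }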
Beispiel #53
        public NeatGenome(uint genomeId,
                NeuronGeneList neuronGeneList,
                List<ModuleGene> moduleGeneList,
                ConnectionGeneList connectionGeneList,
                int inputNeuronCount,
                int outputNeuronCount,
                int outputsPerPolicy) // Schrum: Added
        {
			this.genomeId = genomeId;

			this.neuronGeneList = neuronGeneList;
            this.moduleGeneList = moduleGeneList;
			this.connectionGeneList = connectionGeneList;

			this.inputNeuronCount = inputNeuronCount;
            // Schrum: Removed (not used)
			//this.inputAndBiasNeuronCount = inputNeuronCount+1;
			this.outputNeuronCount = outputNeuronCount;
			
            // Schrum: Removed (not used)
            //this.inputBiasOutputNeuronCount = inputAndBiasNeuronCount + outputNeuronCount;
			//this.inputBiasOutputNeuronCountMinus2 = inputBiasOutputNeuronCount-2;
            
            // Schrum: Added
            this.outputsPerPolicy = outputsPerPolicy;

			Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
		}
        public override NeatGenome generateGenome(INetwork network)
        {
            var coordinates = new double[8];
            int iterations  = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // copy the neuron list to a new list and update the biases for hidden and output nodes
            NeuronGeneList newNeurons = new NeuronGeneList(neurons);

            foreach (NeuronGene gene in newNeurons)
            {
                if (gene.NeuronType == NeuronType.Output)
                {
                    gene.NeuronBias = 3; // GWM - Bias hardcoded to 3 for output neurons in Sebastian's CTRNN architecture
                    coordinates[2]  = 0;
                    coordinates[3]  = 0;
                    coordinates[6]  = 0;
                    coordinates[7]  = 0;
                    switch (gene.InnovationId)
                    {
                    case 4:
                        coordinates[0] = -1;
                        coordinates[1] = 1;
                        coordinates[4] = -1;
                        coordinates[5] = 1;
                        break;

                    case 5:
                        coordinates[0] = 0;
                        coordinates[1] = 1;
                        coordinates[4] = -1;
                        coordinates[5] = 1;
                        break;

                    case 6:
                        coordinates[0] = 1;
                        coordinates[1] = 1;
                        coordinates[4] = -1;
                        coordinates[5] = 1;
                        break;

                    case 7:
                        coordinates[0] = -1;
                        coordinates[1] = 1;
                        coordinates[4] = 1;
                        coordinates[5] = 1;
                        break;

                    case 8:
                        coordinates[0] = 0;
                        coordinates[1] = 1;
                        coordinates[4] = 1;
                        coordinates[5] = 1;
                        break;

                    case 9:
                        coordinates[0] = 1;
                        coordinates[1] = 1;
                        coordinates[4] = 1;
                        coordinates[5] = 1;
                        break;

                    case 10:
                        coordinates[0] = -1;
                        coordinates[1] = 1;
                        coordinates[4] = -1;
                        coordinates[5] = -1;
                        break;

                    case 11:
                        coordinates[0] = 0;
                        coordinates[1] = 1;
                        coordinates[4] = -1;
                        coordinates[5] = -1;
                        break;

                    case 12:
                        coordinates[0] = 1;
                        coordinates[1] = 1;
                        coordinates[4] = -1;
                        coordinates[5] = -1;
                        break;

                    case 13:
                        coordinates[0] = -1;
                        coordinates[1] = 1;
                        coordinates[4] = 1;
                        coordinates[5] = -1;
                        break;

                    case 14:
                        coordinates[0] = 0;
                        coordinates[1] = 1;
                        coordinates[4] = 1;
                        coordinates[5] = -1;
                        break;

                    case 15:
                        coordinates[0] = 1;
                        coordinates[1] = 1;
                        coordinates[4] = 1;
                        coordinates[5] = -1;
                        break;
                    }

                    float output;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    output            = network.GetOutputSignal(3);
                    gene.TimeConstant = (output + 1) * 30 + 1; // normalize output to [1,61] for the time constant
                }
                if (gene.NeuronType == NeuronType.Hidden)
                {
                    coordinates[2] = 0;
                    coordinates[3] = 0;
                    coordinates[6] = 0;
                    coordinates[7] = 0;
                    switch (gene.InnovationId)
                    {
                    case 16:
                        coordinates[0] = -1;
                        coordinates[1] = 0;
                        coordinates[4] = -1;
                        coordinates[5] = 1;
                        break;

                    case 17:
                        coordinates[0] = 1;
                        coordinates[1] = 0;
                        coordinates[4] = -1;
                        coordinates[5] = 1;
                        break;

                    case 18:
                        coordinates[0] = -1;
                        coordinates[1] = 0;
                        coordinates[4] = 1;
                        coordinates[5] = 1;
                        break;

                    case 19:
                        coordinates[0] = 1;
                        coordinates[1] = 0;
                        coordinates[4] = 1;
                        coordinates[5] = 1;
                        break;

                    case 20:
                        coordinates[0] = -1;
                        coordinates[1] = 0;
                        coordinates[4] = -1;
                        coordinates[5] = -1;
                        break;

                    case 21:
                        coordinates[0] = 1;
                        coordinates[1] = 0;
                        coordinates[4] = -1;
                        coordinates[5] = -1;
                        break;

                    case 22:
                        coordinates[0] = -1;
                        coordinates[1] = 0;
                        coordinates[4] = 1;
                        coordinates[5] = -1;
                        break;

                    case 23:
                        coordinates[0] = 1;
                        coordinates[1] = 0;
                        coordinates[4] = 1;
                        coordinates[5] = -1;
                        break;
                    }

                    float output;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    output            = network.GetOutputSignal(2);
                    gene.NeuronBias   = output;
                    output            = network.GetOutputSignal(3);
                    gene.TimeConstant = (output + 1) * 30 + 1; // normalize output to [1,61] for the time constant
                }
            }

            ConnectionGeneList connections = new ConnectionGeneList(88);
            uint connectionCounter         = 0;

            // intramodule connections for first subunit
            coordinates[4] = -1;
            coordinates[5] = 1;
            coordinates[6] = -1;
            coordinates[7] = 1;
            addModule(network, iterations, coordinates, 0, connections, connectionCounter);

            // intramodule connections for second subunit
            coordinates[4] = 1;
            coordinates[5] = 1;
            coordinates[6] = 1;
            coordinates[7] = 1;
            addModule(network, iterations, coordinates, 1, connections, connectionCounter);

            // intramodule connections for third subunit
            coordinates[4] = -1;
            coordinates[5] = -1;
            coordinates[6] = -1;
            coordinates[7] = -1;
            addModule(network, iterations, coordinates, 2, connections, connectionCounter);

            // intramodule connections for fourth subunit
            coordinates[4] = 1;
            coordinates[5] = -1;
            coordinates[6] = 1;
            coordinates[7] = -1;
            addModule(network, iterations, coordinates, 3, connections, connectionCounter);

            // intermodule connections
            // vertical connections
            coordinates[4] = -1;
            coordinates[5] = -1;
            coordinates[6] = -1;
            coordinates[7] = 1;

            coordinates[0] = -1;
            coordinates[1] = 1;
            coordinates[2] = -1;
            coordinates[3] = 0;
            addConnection(network, iterations, 10, 16, coordinates, true, connections, connectionCounter);

            coordinates[0] = 0;
            coordinates[1] = 1;
            coordinates[2] = 0;
            coordinates[3] = -1;
            addConnection(network, iterations, 11, 0, coordinates, true, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 1;
            coordinates[2] = 1;
            coordinates[3] = 0;
            addConnection(network, iterations, 12, 17, coordinates, true, connections, connectionCounter);

            coordinates[4] = -1;
            coordinates[5] = 1;
            coordinates[6] = -1;
            coordinates[7] = -1;

            coordinates[0] = -1;
            coordinates[1] = 0;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 16, 10, coordinates, true, connections, connectionCounter);

            coordinates[0] = 0;
            coordinates[1] = -1;
            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 0, 11, coordinates, true, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 0;
            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 17, 12, coordinates, true, connections, connectionCounter);

            coordinates[4] = 1;
            coordinates[5] = -1;
            coordinates[6] = 1;
            coordinates[7] = 1;

            coordinates[0] = -1;
            coordinates[1] = 1;
            coordinates[2] = -1;
            coordinates[3] = 0;
            addConnection(network, iterations, 13, 18, coordinates, true, connections, connectionCounter);

            coordinates[0] = 0;
            coordinates[1] = 1;
            coordinates[2] = 0;
            coordinates[3] = -1;
            addConnection(network, iterations, 14, 1, coordinates, true, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 1;
            coordinates[2] = 1;
            coordinates[3] = 0;
            addConnection(network, iterations, 15, 19, coordinates, true, connections, connectionCounter);

            coordinates[4] = 1;
            coordinates[5] = 1;
            coordinates[6] = 1;
            coordinates[7] = -1;

            coordinates[0] = -1;
            coordinates[1] = 0;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 18, 13, coordinates, true, connections, connectionCounter);

            coordinates[0] = 0;
            coordinates[1] = -1;
            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 1, 14, coordinates, true, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 0;
            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 19, 15, coordinates, true, connections, connectionCounter);

            // horizontal connections
            coordinates[4] = -1;
            coordinates[5] = 1;
            coordinates[6] = 1;
            coordinates[7] = 1;

            coordinates[0] = 1;
            coordinates[1] = 0;
            coordinates[2] = -1;
            coordinates[3] = 0;
            addConnection(network, iterations, 17, 18, coordinates, true, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 1;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 6, 7, coordinates, true, connections, connectionCounter);

            coordinates[4] = 1;
            coordinates[5] = 1;
            coordinates[6] = -1;
            coordinates[7] = 1;

            coordinates[0] = -1;
            coordinates[1] = 0;
            coordinates[2] = 1;
            coordinates[3] = 0;
            addConnection(network, iterations, 18, 17, coordinates, true, connections, connectionCounter);

            coordinates[0] = -1;
            coordinates[1] = 1;
            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 7, 6, coordinates, true, connections, connectionCounter);

            coordinates[4] = -1;
            coordinates[5] = -1;
            coordinates[6] = 1;
            coordinates[7] = -1;

            coordinates[0] = 1;
            coordinates[1] = 0;
            coordinates[2] = -1;
            coordinates[3] = 0;
            addConnection(network, iterations, 21, 22, coordinates, true, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 1;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 12, 13, coordinates, true, connections, connectionCounter);

            coordinates[4] = 1;
            coordinates[5] = -1;
            coordinates[6] = -1;
            coordinates[7] = -1;

            coordinates[0] = -1;
            coordinates[1] = 0;
            coordinates[2] = 1;
            coordinates[3] = 0;
            addConnection(network, iterations, 22, 21, coordinates, true, connections, connectionCounter);

            coordinates[0] = -1;
            coordinates[1] = 1;
            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 13, 12, coordinates, true, connections, connectionCounter);

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount));
        }
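        // In the CTRNN variant above, a CPPN output in [-1, 1] is mapped to a neuron time constant
        // in [1, 61] via (output + 1) * 30 + 1: output = -1 gives 1, output = 0 gives 31, and
        // output = 1 gives 61.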
Beispiel #55
        public override NeatGenome.NeatGenome generateGenome(INetwork network = null)
        {
            uint connectionCounter         = 0;
            ConnectionGeneList connections = new ConnectionGeneList();

            /*
             * // Connect the R,G,B inputs to the RGB layer.
             * // R should encourage planting, while G and B should inhibit it.
             * for (uint x = 0; x < rgbOneDimension; x++)
             * {
             *  for (uint y = 0; y < rgbOneDimension; y++)
             *  {
             *      for (uint x2 = 0; x2 < rgbOneDimension; x2++)
             *      {
             *          for (uint y2 = 0; y2 < rgbOneDimension; y2++)
             *          {
             *              // R
             *              connections.Add(new ConnectionGene(connectionCounter++, x + rgbOneDimension * y, (x2 + rgbOneDimension * y2) + inputCount + outputCount, 1.0));
             *
             *              // G
             *              connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + colorArraySize, (x2 + rgbOneDimension * y2) + inputCount + outputCount, -1.0));
             *
             *              // B
             *              connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + 2 * colorArraySize, (x2 + rgbOneDimension * y2) + inputCount + outputCount, -1.0));
             *          }
             *      }
             *  }
             * }
             *
             * // Connect the heading inputs to the heading integration layer with weight 1.0
             * for (uint x = 0; x < headingX; x++)
             * {
             *  for (uint y = 0; y < headingY; y++)
             *  {
             *      for (uint x2 = 0; x2 < headingX; x2++)
             *      {
             *          for (uint y2 = 0; y2 < headingY; y2++)
             *          {
             *              connections.Add(new ConnectionGene(connectionCounter++, 3 * colorArraySize + (x + headingX * y), (x2 + headingX * y2) + inputCount + outputCount + colorArraySize, 1.0));
             *          }
             *      }
             *  }
             * }
             *
             * // Add the connections between the RGB hidden and output layers (but only the planting output, #4)
             * for (uint x = 0; x < rgbOneDimension; x++)
             * {
             *  for (uint y = 0; y < rgbOneDimension; y++)
             *  {
             *      connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 313, 1.0));
             *  }
             * }
             *
             * // Add the connections between the heading hidden and output layers (but only the straight output, #2)
             * for (uint x = 0; x < headingX; x++)
             * {
             *  for (uint y = 0; y < headingY; y++)
             *  {
             *      connections.Add(new ConnectionGene(connectionCounter++, (x + headingX * y) + inputCount + outputCount + colorArraySize, 311, 1.0));
             *  }
             * }
             */

            // Return a genome combining the already-specified neurons with the CPPN-generated connections
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
        private void addModule(INetwork network, int iterations, double[] coordinates, uint moduleOffset, ConnectionGeneList connections, uint connectionCounter)
        {
            // from input
            coordinates[0] = 0;
            coordinates[1] = -1;
            coordinates[2] = -1;
            coordinates[3] = 0;
            addConnection(network, iterations, 0 + moduleOffset, 16 + moduleOffset * 2, coordinates, false, connections, connectionCounter);

            coordinates[2] = 1;
            coordinates[3] = 0;
            addConnection(network, iterations, 0 + moduleOffset, 17 + moduleOffset * 2, coordinates, false, connections, connectionCounter);

            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 0 + moduleOffset, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 0 + moduleOffset, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 0 + moduleOffset, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            // from first hidden
            coordinates[0] = -1;
            coordinates[1] = 0;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 16 + moduleOffset * 2, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 16 + moduleOffset * 2, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 16 + moduleOffset * 2, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 1;
            coordinates[3] = 0;
            addConnection(network, iterations, 16 + moduleOffset * 2, 17 + moduleOffset * 2, coordinates, false, connections, connectionCounter);

            // from second hidden
            coordinates[0] = 1;
            coordinates[1] = 0;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 17 + moduleOffset * 2, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 17 + moduleOffset * 2, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 17 + moduleOffset * 2, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[2] = -1;
            coordinates[3] = 0;
            addConnection(network, iterations, 17 + moduleOffset * 2, 16 + moduleOffset * 2, coordinates, false, connections, connectionCounter);

            // Lateral connections between the output neurons (y1 = 1).
            coordinates[0] = -1;
            coordinates[1] = 1;
            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 4 + moduleOffset * 3, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[0] = 0;
            coordinates[1] = 1;
            coordinates[2] = -1;
            coordinates[3] = 1;
            addConnection(network, iterations, 5 + moduleOffset * 3, 4 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[0] = 1;
            coordinates[1] = 1;
            coordinates[2] = 0;
            coordinates[3] = 1;
            addConnection(network, iterations, 6 + moduleOffset * 3, 5 + moduleOffset * 3, coordinates, false, connections, connectionCounter);

            coordinates[0] = 0;
            coordinates[1] = 1;
            coordinates[2] = 1;
            coordinates[3] = 1;
            addConnection(network, iterations, 5 + moduleOffset * 3, 6 + moduleOffset * 3, coordinates, false, connections, connectionCounter);
        }
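        /* Worked indexing example for addModule (illustrative only; it assumes moduleOffset
         * increases by one per module, which is not shown in this snippet):
         *   module m wires input neuron (0 + m), hidden neurons (16 + 2m) and (17 + 2m),
         *   and output neurons (4 + 3m), (5 + 3m) and (6 + 3m).
         * So module 0 touches IDs {0, 16, 17, 4, 5, 6} and module 1 touches {1, 18, 19, 7, 8, 9}.
         */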
        public void normalizeWeightConnections(ref ConnectionGeneList connections, int neuronCount)
        {
            double[] weightSumPos = new double[neuronCount];
            double[] weightSumNeg = new double[neuronCount];
            // Normalize connection weights: accumulate, per target neuron, the summed
            // magnitude of its positive and of its negative incoming weights.
            for (int i = 0; i < connections.Count; i++)
            {
                if (connections[i].Weight >= 0.0f)
                {
                    weightSumPos[connections[i].TargetNeuronId] += Math.Abs(connections[i].Weight);
                }
                else
                {
                    weightSumNeg[connections[i].TargetNeuronId] += Math.Abs(connections[i].Weight);
                }
            }
            // Divide each weight by its target neuron's positive or negative sum, then
            // rescale by 3.0 so every weight ends up in the range [-3.0, 3.0].
            for (int i = 0; i < connections.Count; i++)
            {
                if (connections[i].Weight >= 0.0f)
                {
                    if (weightSumPos[connections[i].TargetNeuronId] != 0.0f)
                        connections[i].Weight /= weightSumPos[connections[i].TargetNeuronId];
                }
                else
                {
                    if (weightSumNeg[connections[i].TargetNeuronId] != 0.0f)
                        connections[i].Weight /= weightSumNeg[connections[i].TargetNeuronId];
                }
                connections[i].Weight *= 3.0;
            }
        }
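        /* Worked example for normalizeWeightConnections (illustrative only):
         * if neuron 7 has incoming weights { +2.0, +6.0, -1.0 }, then
         * weightSumPos[7] = 8.0 and weightSumNeg[7] = 1.0, so the weights become
         * { +0.25, +0.75, -1.0 } after normalization and { +0.75, +2.25, -3.0 } after the
         * final *= 3.0 rescale. Positive and negative inputs are normalized independently,
         * so each signed sum per target neuron has magnitude at most 3.0.
         */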
        private void addConnection(INetwork network, int iterations, uint source, uint target, double[] coordinates, bool isInter, ConnectionGeneList connections, uint connectionCounter)
        {
            float output;

            // Query the CPPN with the source/target substrate coordinates.
            network.ClearSignals();
            network.SetInputSignals(coordinates);
            network.MultipleSteps(iterations);

            // CPPN output 1 is read for inter-module queries, output 0 otherwise.
            if (isInter)
            {
                output = network.GetOutputSignal(1);
            }
            else
            {
                output = network.GetOutputSignal(0);
            }

            // Express the connection only if the output magnitude exceeds the threshold;
            // map the excess magnitude linearly onto [0, weightRange] and keep the sign.
            // Note: connectionCounter is passed by value, so the increment below is local
            // to this call.
            if (Math.Abs(output) > threshold)
            {
                float weight = (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
                connections.Add(new ConnectionGene(connectionCounter++, source, target, weight));
            }
        }
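        /* Worked example of the threshold-to-weight mapping in addConnection (illustrative
         * only; the actual values of threshold and weightRange are fields defined elsewhere):
         * with threshold = 0.2 and weightRange = 3.0, a CPPN output of -0.6 gives
         *   weight = ((0.6 - 0.2) / (1 - 0.2)) * 3.0 * sign(-0.6) = -1.5,
         * while any output with |output| <= 0.2 produces no connection at all.
         */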
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            List<PointF> hiddenNeuronPositions = new List<PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList();//(int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);

            EvolvableSubstrate se = new EvolvableSubstrate();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                HyperNEATParameters.initialRes,
                (float)HyperNEATParameters.varianceThreshold,
                (float)HyperNEATParameters.bandingThreshold,
                (int)HyperNEATParameters.ESIterations,
                (float)HyperNEATParameters.divisionThreshold,
                HyperNEATParameters.maximumRes,
                InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            float[] coordinates = new float[5];
            uint connectionCounter = (uint)connections.Count;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] visited = new bool[neurons.Count];
            List<uint> nodeList = new List<uint>();
            bool[] connectedToInput = new bool[neurons.Count];

            bool[] isOutput = new bool[neurons.Count];

            bool danglingConnection = true;

            // Iteratively prune dangling connections: a connection is dangling if its source
            // neuron receives no signal or its target neuron feeds nothing else. Removing
            // connections can expose new dangling ones, so repeat until none remain.
            while (danglingConnection)
            {
                bool[] hasIncoming = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    hasIncoming[co.TargetNeuronId] = true;
                }
                // Input neurons always count as receiving a signal.
                for (int i = 0; i < InputCount; i++)
                    hasIncoming[i] = true;

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    // Neurons that only connect to themselves don't count.
                    if (co.TargetNeuronId != co.SourceNeuronId)
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                }

                // Output neurons always count as feeding the network's output.
                for (int i = 0; i < OutputCount; i++)
                    hasOutgoing[i + InputCount] = true;

                danglingConnection = false;
                // Check whether any dangling connections remain.
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncoming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return !hasIncoming[m.SourceNeuronId]; });
                connections.RemoveAll(delegate(ConnectionGene m) { return !hasOutgoing[m.TargetNeuronId]; });
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)InputCount, (int)OutputCount);

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;

            return gn;
        }
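        // Minimal standalone sketch of the dangling-connection pruning performed in
        // generateHomogeneousGenomeES above (illustrative only, not part of the original
        // code): connections are modelled as plain (source, target) index pairs, and the
        // neuron/input/output counts are assumed to follow the same input|output|hidden
        // ID layout used above.
        private static void PruneDanglingSketch(List<KeyValuePair<int, int>> conns, int neuronCount, int inputCount, int outputCount)
        {
            bool removedAny = true;
            while (removedAny)
            {
                bool[] hasIncoming = new bool[neuronCount];
                bool[] hasOutgoing = new bool[neuronCount];
                foreach (KeyValuePair<int, int> c in conns)
                {
                    hasIncoming[c.Value] = true;                      // target receives a signal
                    if (c.Key != c.Value) hasOutgoing[c.Key] = true;  // self-loops don't count
                }
                for (int i = 0; i < inputCount; i++) hasIncoming[i] = true;               // inputs are always fed
                for (int i = 0; i < outputCount; i++) hasOutgoing[i + inputCount] = true; // outputs are always used

                int before = conns.Count;
                conns.RemoveAll(delegate(KeyValuePair<int, int> c) { return !hasIncoming[c.Key] || !hasOutgoing[c.Value]; });
                removedAny = conns.Count != before;
            }
        }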
        // Used only for the predator-prey domain; can be ignored otherwise.
        public NeatGenome generateMultiGenomeModulus(INetwork network, uint numberOfAgents)
        {
#if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
#endif
            var   coordinates = new double[4];
            float output;
            uint  connectionCounter = 0;

            uint inputsPerAgent  = inputCount / numberOfAgents;
            uint hiddenPerAgent  = hiddenCount / numberOfAgents;
            uint outputsPerAgent = outputCount / numberOfAgents;

            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;    //x1
            coordinates[1] = -1;                        //y1
            coordinates[2] = -1 + hiddenDelta / 2.0f;   //x2
            coordinates[3] = 0;                         //y2

            for (uint agent = 0; agent < numberOfAgents; agent++)
            {
                coordinates[0] = -1 + (agent * inputsPerAgent * inputDelta) + inputDelta / 2.0f;
                for (uint source = 0; source < inputsPerAgent; source++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + (agent * hiddenPerAgent * hiddenDelta) + hiddenDelta / 2.0f;
                    for (uint target = 0; target < hiddenPerAgent; target++, coordinates[2] += hiddenDelta)
                    {
                        //Since there are an equal number of input and hidden nodes, we check these every time
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                        output = network.GetOutputSignal(0);
#if OUTPUT
                        foreach (double d in coordinates)
                        {
                            sw.Write(d + " ");
                        }
                        sw.Write(output);
                        sw.WriteLine();
#endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, (agent * inputsPerAgent) + source, (agent * hiddenPerAgent) + target + inputCount + outputCount, weight));
                        }

                        //Since every other hidden node has a corresponding output node, we check every other time
                        if (target % 2 == 0)
                        {
                            network.ClearSignals();
                            coordinates[1] = 0;
                            coordinates[3] = 1;
                            network.SetInputSignals(coordinates);
                            ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                            output = network.GetOutputSignal(0);
#if OUTPUT
                            foreach (double d in coordinates)
                            {
                                sw.Write(d + " ");
                            }
                            sw.Write(output);
                            sw.WriteLine();
#endif
                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, (agent * hiddenPerAgent) + source + inputCount + outputCount, ((outputsPerAgent * agent) + ((target) / 2)) + inputCount, weight));
                            }
                            coordinates[1] = -1;
                            coordinates[3] = 0;
                        }
                    }
                }
            }
#if OUTPUT
            sw.Flush();
#endif
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
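        /* Worked indexing example for generateMultiGenomeModulus (illustrative only; the
         * concrete counts are assumptions): with inputCount = 6, hiddenCount = 6,
         * outputCount = 3 and numberOfAgents = 3, each agent owns 2 inputs, 2 hidden
         * neurons and 1 output. Agent 1 is then wired through substrate IDs
         * inputs {2, 3}, output {7} (outputs start at inputCount = 6) and
         * hidden {11, 12} (hidden starts at inputCount + outputCount = 9).
         */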