Example #1
        public override NeatGenome.NeatGenome GenerateGenome(INetwork cppn)
        {
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

            float[] coordinates = new float[6];
            float   output;
            uint    connectionCounter = 0;
            int     iterations        = 2 * (cppn.TotalNeuronCount - (cppn.InputNeuronCount + cppn.OutputNeuronCount)) + 1;

            // TODO:
            iterations = Math.Min(iterations, 4);

            coordinates[0] = -1 + inputDelta / 2.0f;
            coordinates[1] = -1;
            coordinates[2] = m_zValue;
            coordinates[3] = -1 + hiddenDelta / 2.0f;
            coordinates[4] = 0;
            coordinates[5] = m_zValue;

            for (uint source = 0; source < inputCount; source++, coordinates[0] += inputDelta)
            {
                coordinates[3] = -1 + hiddenDelta / 2.0f;
                for (uint target = 0; target < hiddenCount; target++, coordinates[3] += hiddenDelta)
                {
                    //Since there are an equal number of input and hidden nodes, we check these every time
                    cppn.ClearSignals();
                    cppn.SetInputSignals(coordinates);
                    cppn.MultipleSteps(iterations);
                    output = cppn.GetOutputSignal(0);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, source, target + inputCount + outputCount, weight));
                    }

                    //Since every other hidden node has a corresponding output node, we check every other time
                    if (target % 2 == 0)
                    {
                        cppn.ClearSignals();
                        coordinates[1] = 0;
                        coordinates[4] = 1;
                        cppn.SetInputSignals(coordinates);
                        cppn.MultipleSteps(iterations);
                        output = cppn.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, (target / 2) + inputCount, weight));
                        }
                        coordinates[1] = -1;
                        coordinates[4] = 0;
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
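Every generator in this listing maps the CPPN output to a connection weight with the same thresholding expression. The helper below is a minimal standalone sketch of that mapping (the name TryMapWeight and its standalone form are not from the original source; threshold and weightRange stand for the fields used by GenerateGenome above, and a using System; directive is assumed):

// Sketch only: outputs with |output| <= threshold express no connection; the
// remainder is rescaled to (0, weightRange] and the CPPN output's sign is kept.
static bool TryMapWeight(float output, float threshold, float weightRange, out float weight)
{
    if (Math.Abs(output) > threshold)
    {
        weight = (Math.Abs(output) - threshold) / (1 - threshold) * weightRange * Math.Sign(output);
        return true;
    }
    weight = 0f;
    return false;
}

Note that a weight of exactly ±weightRange is produced only when the CPPN output saturates at ±1, so weightRange acts as an upper bound rather than a typical magnitude.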
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));
            // Schrum: Retrieve this new property, which is saved to xml files now
            int outputsPerPolicy = int.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "outputsperpolicy"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            //return new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            // Schrum: Changed to include the outputs per policy
            return(new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount, outputsPerPolicy));
        }
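A hedged usage sketch for Read follows; everything in it is an assumption except the element and attribute names, which come from the queries above (the id and outputsperpolicy attributes and the neurons/neuron, modules/module and connections/connection nodes):

// Hypothetical caller: load a saved genome document and pass its root element to Read.
using System.Xml;

XmlDocument doc = new XmlDocument();
doc.Load("genome.xml");                     // assumed file name
// Read is static on whichever reader class declares it above; DocumentElement is
// the root element whose attributes and child nodes Read queries.
NeatGenome genome = Read(doc.DocumentElement);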
		public static NeatGenome Read(XmlElement xmlGenome)
		{
			int inputNeuronCount=0;
			int outputNeuronCount=0;

			uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

			//--- Read neuron genes into a list.
			NeuronGeneList neuronGeneList = new NeuronGeneList();
			XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
			foreach(XmlElement xmlNeuronGene in listNeuronGenes)
			{
				NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

				// Count the input and output neurons as we go.
				switch(neuronGene.NeuronType)
				{
					case NeuronType.Input:
						inputNeuronCount++;
						break;
					case NeuronType.Output:
						outputNeuronCount++;
						break;
				}

				neuronGeneList.Add(neuronGene);
			}

            //--- Read module genes into a list.
            List<ModuleGene> moduleGeneList = new List<ModuleGene>();
            XmlNodeList listModuleGenes = xmlGenome.SelectNodes("modules/module");
            foreach (XmlElement xmlModuleGene in listModuleGenes) {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

			//--- Read connection genes into a list.
			ConnectionGeneList connectionGeneList = new ConnectionGeneList();
			XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
			foreach(XmlElement xmlConnectionGene in listConnectionGenes)
				connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
			
			//return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            NeatGenome g = new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            g.Behavior = ReadBehavior(xmlGenome.SelectSingleNode("behavior"));
            g.Behavior.objectives = new double[6];
            g.objectives = new double[6];


            // JUSTIN: Read grid/trajectory info
            g.GridCoords = ReadGrid(xmlGenome.SelectSingleNode("grid"));
            g.Behavior.trajectory = ReadTrajectory(xmlGenome.SelectSingleNode("trajectory"));

            return g;
		}
Example #4
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            long id = long.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            return(new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount));
        }
Example #5
        public NeatGenome.NeatGenome generatePerceptronCircle(INetwork network, bool distance)
        {
            ConnectionGeneList connections = new ConnectionGeneList((int)(inputCount * outputCount));

            double[] inputs;
            if (distance)//|| angle)
            {
                inputs = new double[5];
            }
            else
            {
                inputs = new double[4];
            }
            double output;
            uint   counter          = 0;
            double inputAngleDelta  = (2 * Math.PI) / inputCount;
            double outputAngleDelta = (2 * Math.PI) / outputCount;

            double angleFrom = -3 * Math.PI / 4;

            for (uint neuronFrom = 0; neuronFrom < inputCount; neuronFrom++, angleFrom += inputAngleDelta)
            {
                inputs[0] = .5 * Math.Cos(angleFrom + (inputAngleDelta / 2.0));
                inputs[1] = .5 * Math.Sin(angleFrom + (inputAngleDelta / 2.0));
                double angleTo = -3 * Math.PI / 4;
                for (uint neuronTo = 0; neuronTo < outputCount; neuronTo++, angleTo += outputAngleDelta)
                {
                    inputs[2] = Math.Cos(angleTo + (outputAngleDelta / 2.0));
                    inputs[3] = Math.Sin(angleTo + (outputAngleDelta / 2.0));
                    //if(angle)
                    //inputs[4] = Math.Abs(angleFrom - angleTo);
                    if (distance)
                    {
                        inputs[4] = ((Math.Sqrt(Math.Pow(inputs[0] - inputs[2], 2) + Math.Pow(inputs[1] - inputs[3], 2)) / (2 * sqrt2)));
                    }
                    network.ClearSignals();
                    network.SetInputSignals(inputs);
                    network.MultipleSteps(5);
                    output = network.GetOutputSignal(0);
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(counter++, neuronFrom, neuronTo + inputCount, weight));
                    }
                }
            }
            NeatGenome.NeatGenome g = new NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
            return(g);
        }
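generatePerceptronCircle samples the CPPN at the midpoint of each angular segment, with input neurons on a circle of radius 0.5 and output neurons on the unit circle, both laid out counter-clockwise from -3π/4. The standalone sketch below (the neuron counts are assumed example values) simply prints those sampled coordinates, which can help when checking what the CPPN actually sees:

// Sketch only: reproduce the substrate coordinates fed to the CPPN above.
using System;

int inputCount = 4, outputCount = 4;            // assumed sizes for illustration
double inputAngleDelta = 2 * Math.PI / inputCount;
double outputAngleDelta = 2 * Math.PI / outputCount;

double angleFrom = -3 * Math.PI / 4;
for (int i = 0; i < inputCount; i++, angleFrom += inputAngleDelta)
{
    // input neurons sit on a circle of radius 0.5, sampled at each segment's midpoint
    double x = 0.5 * Math.Cos(angleFrom + inputAngleDelta / 2.0);
    double y = 0.5 * Math.Sin(angleFrom + inputAngleDelta / 2.0);
    Console.WriteLine($"input {i}: ({x:F3}, {y:F3})");
}

double angleTo = -3 * Math.PI / 4;
for (int o = 0; o < outputCount; o++, angleTo += outputAngleDelta)
{
    // output neurons sit on the unit circle
    double x = Math.Cos(angleTo + outputAngleDelta / 2.0);
    double y = Math.Sin(angleTo + outputAngleDelta / 2.0);
    Console.WriteLine($"output {o}: ({x:F3}, {y:F3})");
}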
Example #6
        public NeatGenome.NeatGenome generatePerceptronPattern(INetwork network, bool distance)
        {
            ConnectionGeneList connections = new ConnectionGeneList((int)(inputCount * outputCount));

            double[] inputs;
            if (distance)
            {
                inputs = new double[5];
            }
            else
            {
                inputs = new double[4];
            }
            //for this particular config, these inputs will never change so just set them now
            inputs[1] = 1;
            inputs[3] = -1;
            uint   counter = 0;
            double output;
            double x1 = -1, x2 = -1;

            double inputDelta  = (2.0 / (inputCount - 1));
            double outputDelta = (2.0 / (outputCount - 1));

            for (uint nodeFrom = 0; nodeFrom < inputCount; nodeFrom++, x1 += inputDelta)
            {
                inputs[0] = x1;
                x2        = -1;
                for (uint nodeTo = 0; nodeTo < outputCount; nodeTo++, x2 += outputDelta)
                {
                    inputs[2] = x2;
                    if (distance)
                    {
                        inputs[4] = ((Math.Sqrt(Math.Pow(inputs[0] - inputs[2], 2) + Math.Pow(inputs[1] - inputs[3], 2)) / (2 * sqrt2)));
                    }
                    network.ClearSignals();
                    network.SetInputSignals(inputs);
                    //currently assuming a depth no greater than 5
                    network.MultipleSteps(5);
                    output = network.GetOutputSignal(0);
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(counter++, nodeFrom, nodeTo + inputCount, weight));
                    }
                }
            }
            NeatGenome.NeatGenome g = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
            return(g);
        }
		public static NeatGenome Read(XmlElement xmlGenome)
		{
			int inputNeuronCount=0;
			int outputNeuronCount=0;

			uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));
            // Schrum: Retrieve this new property, which is saved to xml files now
            int outputsPerPolicy = int.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "outputsperpolicy"));

			//--- Read neuron genes into a list.
			NeuronGeneList neuronGeneList = new NeuronGeneList();
			XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
			foreach(XmlElement xmlNeuronGene in listNeuronGenes)
			{
				NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

				// Count the input and output neurons as we go.
				switch(neuronGene.NeuronType)
				{
					case NeuronType.Input:
						inputNeuronCount++;
						break;
					case NeuronType.Output:
						outputNeuronCount++;
						break;
				}

				neuronGeneList.Add(neuronGene);
			}

            //--- Read module genes into a list.
            List<ModuleGene> moduleGeneList = new List<ModuleGene>();
            XmlNodeList listModuleGenes = xmlGenome.SelectNodes("modules/module");
            foreach (XmlElement xmlModuleGene in listModuleGenes) {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

			//--- Read connection genes into a list.
			ConnectionGeneList connectionGeneList = new ConnectionGeneList();
			XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
			foreach(XmlElement xmlConnectionGene in listConnectionGenes)
				connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
			
			//return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            //return new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            // Schrum: Changed to include the outputs per policy
            return new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount, outputsPerPolicy);
        }
Example #8
        public override NeatGenome generateGenome(INetwork network)
        {
            // copy the neuron list to a new list and update the x/y values
            NeuronGeneList newNeurons = new NeuronGeneList(neurons);

            // set the x and y value of the SUPGs
            foreach (NeuronGene neuron in newNeurons)
            {
                if (neuron.NeuronType == NeuronType.Hidden)
                {
                    // switch to grid substrate configuration
                    neuron.XValue = getXPos2(neuron.InnovationId - 16);
                    neuron.YValue = getYPos2(neuron.InnovationId - 16);
                }
            }

            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

            float[] coordinates = new float[5];
            //float output;
            uint connectionCounter = 0;
            int  iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // connect hidden layer to outputs
            for (uint source = 0; source < hiddenCount; source++)
            {
                coordinates[0] = getXPos(source, false);
                coordinates[1] = getYPos(source, false);

                for (uint target = 0; target < outputCount; target++)
                {
                    // only connect hidden nodes to their single nearest output
                    if (source == target)
                    {
                        coordinates[2] = getXPos(target, true);
                        coordinates[3] = getYPos(target, true);

                        // GWM - fixing weight to 1 for SUPG producing motor outputs
                        connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, target + inputCount, 1));
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount));
        }
Example #9
    void QueryConnection(INetwork network, ConnectionGeneList connections, uint connectionCounter, uint neuron1id, uint neuron2id, NeuronGeneList newNeurons)
    {
        network.ClearSignals();
        //network.SetInputSignal(0, 1);
        network.SetInputSignal(0, newNeurons[(int)neuron1id].XValue);
        network.SetInputSignal(1, newNeurons[(int)neuron1id].YValue);
        network.SetInputSignal(2, newNeurons[(int)neuron2id].XValue);
        network.SetInputSignal(3, newNeurons[(int)neuron2id].YValue);
        network.SetInputSignal(4, 1);
        network.MultipleSteps(10);

        float output = network.GetOutputSignal(0);

        if (Math.Abs(output) > threshold)
        {
            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
            connections.Add(new ConnectionGene(connectionCounter, neuron1id, neuron2id, weight));
        }
    }
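QueryConnection reads the source and target coordinates out of newNeurons rather than taking them as parameters, so a caller has to iterate over candidate pairs itself. Below is a hedged sketch of such a driver loop (assumptions: it lives in the same class, the network/connections/newNeurons variables are in scope, and neuron innovation IDs coincide with their list indices, which is what the (int) casts above imply):

// Hypothetical driver loop for QueryConnection.
uint connectionCounter = 0;
for (uint src = 0; src < newNeurons.Count; src++)
{
    for (uint tgt = 0; tgt < newNeurons.Count; tgt++)
    {
        if (src == tgt) continue;   // skip self-connections in this sketch
        // QueryConnection receives connectionCounter by value, so the caller advances it.
        QueryConnection(network, connections, connectionCounter++, src, tgt, newNeurons);
    }
}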
Example #10
        private void addConnection(INetwork network, int iterations, uint source, uint target, double[] coordinates, bool isInter, ConnectionGeneList connections, uint connectionCounter)
        {
            float output;

            network.ClearSignals();
            network.SetInputSignals(coordinates);
            network.MultipleSteps(iterations);
            if (isInter)
            {
                output = network.GetOutputSignal(1);
            }
            else
            {
                output = network.GetOutputSignal(0);
            }

            if (Math.Abs(output) > threshold)
            {
                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                connections.Add(new ConnectionGene(connectionCounter++, source, target, weight));
            }
        }
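One detail worth noting: connectionCounter is a uint and is therefore passed to addConnection by value, so the post-increment inside the method only advances a local copy. Presumably the caller advances its own counter, along the lines of the hypothetical call below (all argument values are placeholders):

// Hypothetical call site: the caller, not addConnection, advances the counter.
addConnection(network, iterations, source, target, coordinates, false, connections, connectionCounter++);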
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount=0;
            int outputNeuronCount=0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList = new NeuronGeneList();
            XmlNodeList listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");
            foreach(XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch(neuronGene.NeuronType)
                {
                    case NeuronType.Input:
                        inputNeuronCount++;
                        break;
                    case NeuronType.Output:
                        outputNeuronCount++;
                        break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList = new ConnectionGeneList();
            XmlNodeList listConnectionGenes = xmlGenome.SelectNodes("connections/connection");
            foreach(XmlElement xmlConnectionGene in listConnectionGenes)
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));

            return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
        }
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, bool dirComm)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount) +
                numberOfAgents * (ReceiveCount * HiddenCount) + numberOfAgents * (HiddenCount * TransCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;
            uint totalTransCount = TransCount * numberOfAgents;
            uint totalReceiveCount = ReceiveCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden|receive|transmit
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents + ReceiveCount * numberOfAgents + TransCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }
            // set up the receive nodes
            for (uint a = 0; a < totalReceiveCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents, NeuronType.Receive, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the transmit nodes
            for (uint a = 0; a < totalTransCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents + ReceiveCount * numberOfAgents, NeuronType.Transmit, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount + totalReceiveCount + totalTransCount];

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                    case 3: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + ng.GlobalID + sourceCount; break; //Receive
                                    case 4: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount; break; //Transmit
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                    case 3: targetID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + connectedNG.GlobalID + targetCout; break;
                                    case 4: targetID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID] = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            //Add Direct Communication connections
            if (dirComm)
            {
                uint numConnected = ReceiveCount / TransCount;
                agent = 0;

                foreach (float stackCoordinate in stackCoordinates)
                {
                    SortedList<float, uint> closestAgents = new SortedList<float, uint>();
                    uint i = 0;
                    foreach (float otherCoordinate in stackCoordinates)
                    {
                        if (i == agent) continue;
                        float delta = Math.Abs(stackCoordinate - otherCoordinate);
                        closestAgents.Add(delta, i);
                        i++;
                    }
                    uint[] orderedAgents = new uint[numberOfAgents];
                    closestAgents.Values.CopyTo(orderedAgents, 0);
                    uint[] connectedAgents = new uint[numConnected];
                    for (uint j = 0; j < numConnected; j++)
                    {
                        connectedAgents[j] = orderedAgents[j];
                    }

                    foreach (NeuronGroup ng in neuronGroups)
                    {
                        if (ng.GroupType != 4) continue;
                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            uint sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount;

                            foreach (uint connectedAgent in connectedAgents)
                            {
                                uint targetID = totalInputCount + totalOutputCount + totalHiddenCount + (connectedAgent * ReceiveCount) + (ng.GlobalID * numConnected) + agent;

                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, 1.0));
                            }

                            sourceCount++;
                        }
                    }

                    agent++;
                }

            }

            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
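The stacked multi-agent substrate above lays gene IDs out as input | output | hidden | receive | transmit, with each block further subdivided per agent; the two switch statements encode that layout. The helper below is a sketch that mirrors those switches (the name StackedId and its parameter list are not from the original source; n stands for ng.GlobalID + sourceCount in the code above, and a using System; directive is assumed):

// Sketch only: global gene ID of the n-th neuron of a given group type for a given agent.
static uint StackedId(int groupType, uint agent, uint n,
                      uint inCount, uint outCount, uint hidCount,
                      uint recvCount, uint transCount, uint agents)
{
    uint totalIn = inCount * agents, totalOut = outCount * agents;
    uint totalHid = hidCount * agents, totalRecv = recvCount * agents;
    switch (groupType)
    {
        case 0: return (agent * inCount) + n;                                                  // input
        case 1: return totalIn + (agent * outCount) + n;                                       // output
        case 2: return totalIn + totalOut + (agent * hidCount) + n;                            // hidden
        case 3: return totalIn + totalOut + totalHid + (agent * recvCount) + n;                // receive
        case 4: return totalIn + totalOut + totalHid + totalRecv + (agent * transCount) + n;   // transmit
        default: throw new ArgumentOutOfRangeException(nameof(groupType));
    }
}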
Example #13
		private void EnsureMutableConnectionGeneList()
		{
			if(mutableConnectionGeneList!=null)
				return;

			mutableConnectionGeneList = new ConnectionGeneList();

			int bound = connectionGeneList.Count;
			for(int i=0; i<bound; i++)
			{
				ConnectionGene connectionGene = connectionGeneList[i];
				if(!connectionGene.FixedWeight)
					mutableConnectionGeneList.Add(connectionGene);
			}
		}
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            float[] coordinates = new float[5];
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            // Schrum: debugging

            /*
             * Console.WriteLine("generateMultiGenomeStack");
             * Console.WriteLine("numberOfAgents:" + numberOfAgents);
             * Console.WriteLine("totalOutputCount:" + totalOutputCount);
             * Console.WriteLine("totalInputCount:" + totalInputCount);
             */

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                       //Input

                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                    //Output

                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: debugging
            //Console.WriteLine("sng.InputNeuronCount:" + sng.InputNeuronCount);
            //Console.WriteLine("sng.OutputNeuronCount:" + sng.OutputNeuronCount);

            return(sng);
        }
Example #15
        /*
         * The main method that generates a list of ANN connections based on the information in the
         * underlying hypercube.
         * Input : CPPN, InputPositions, OutputPositions, ES-HyperNEAT parameters
         * Output: Connections, HiddenNodes
         */
        public void generateSubstrate(List<PointF> inputNeuronPositions, List<PointF> outputNeuronPositions,
            INetwork genome, int initialDepth, float varianceThreshold, float bandThreshold, int ESIterations,
                                                float divsionThreshold, int maxDepth,
                                                uint inputCount, uint outputCount,
                                                ref  ConnectionGeneList connections, ref List<PointF> hiddenNeurons, bool useLeo = false)
        {
            List<TempConnection> tempConnections = new List<TempConnection>();
            int sourceIndex, targetIndex = 0;
            uint counter = 0;

            this.genome = (ModularNetwork)genome;
            this.initialDepth = initialDepth;
            this.maxDepth = maxDepth;
            this.varianceThreshold = varianceThreshold;
            this.bandThrehold = bandThreshold;
            this.divisionThreshold = divsionThreshold;

            //CONNECTIONS DIRECTLY FROM INPUT NODES
            sourceIndex = 0;
            foreach (PointF input in inputNeuronPositions)
            {
                // Analyze outgoing connectivity pattern from this input
                QuadPoint root = QuadTreeInitialisation(input.X, input.Y, true, (int)initialDepth, (int)maxDepth);
                tempConnections.Clear();
                // Traverse quadtree and add connections to list
                PruneAndExpress(input.X, input.Y, ref tempConnections, root, true, maxDepth);

                foreach (TempConnection p in tempConnections)
                {
                    PointF newp = new PointF(p.x2, p.y2);

                    targetIndex = hiddenNeurons.IndexOf(newp);
                    if (targetIndex == -1)
                    {
                        targetIndex = hiddenNeurons.Count;
                        hiddenNeurons.Add(newp);
                    }
                    connections.Add(new ConnectionGene(counter++, (sourceIndex), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] {p.x1,p.y1,p.x2,p.y2}, p.Outputs));

                }
                sourceIndex++;
            }

            tempConnections.Clear();

            List<PointF> unexploredHiddenNodes = new List<PointF>();
            unexploredHiddenNodes.AddRange(hiddenNeurons);

            for (int step = 0; step < ESIterations; step++)
            {
                foreach (PointF hiddenP in unexploredHiddenNodes)
                {
                    tempConnections.Clear();
                    QuadPoint root = QuadTreeInitialisation(hiddenP.X, hiddenP.Y, true, (int)initialDepth, (int)maxDepth);
                    PruneAndExpress(hiddenP.X, hiddenP.Y, ref tempConnections, root, true, maxDepth);

                    sourceIndex = hiddenNeurons.IndexOf(hiddenP);   //TODO there might be a computationally less expensive way

                    foreach (TempConnection p in tempConnections)
                    {

                        PointF newp = new PointF(p.x2, p.y2);

                        targetIndex = hiddenNeurons.IndexOf(newp);
                        if (targetIndex == -1)
                        {
                            targetIndex = hiddenNeurons.Count;
                            hiddenNeurons.Add(newp);

                        }
                        connections.Add(new ConnectionGene(counter++, (sourceIndex + inputCount + outputCount), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                    }
                }
                // Remove the just explored nodes
                List<PointF> temp = new List<PointF>();
                temp.AddRange(hiddenNeurons);
                foreach (PointF f in unexploredHiddenNodes)
                    temp.Remove(f);

                unexploredHiddenNodes = temp;

            }

            tempConnections.Clear();

            //CONNECT TO OUTPUT
            targetIndex = 0;
            foreach (PointF outputPos in outputNeuronPositions)
            {
                // Analyze incoming connectivity pattern to this output
                QuadPoint root = QuadTreeInitialisation(outputPos.X, outputPos.Y, false, (int)initialDepth, (int)maxDepth);
                tempConnections.Clear();
                PruneAndExpress(outputPos.X, outputPos.Y, ref tempConnections, root, false, maxDepth);

                PointF target = new PointF(outputPos.X, outputPos.Y);

                foreach (TempConnection t in tempConnections)
                {
                    PointF source = new PointF(t.x1, t.y1);
                    sourceIndex = hiddenNeurons.IndexOf(source);

                    /* New nodes are not created here because all the hidden nodes that are
                        connected to an input/hidden node are already expressed. */
                    if (sourceIndex != -1)  //only connect if hidden neuron already exists
                        connections.Add(new ConnectionGene(counter++, (sourceIndex + inputCount + outputCount), (targetIndex + inputCount), t.weight * HyperNEATParameters.weightRange, new float[] { t.x1, t.y1, t.x2, t.y2 }, t.Outputs));
                }
                targetIndex++;
            }
        }
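The offsets used above imply a fixed gene-ID layout for the evolved substrate: inputs occupy [0, inputCount), outputs occupy [inputCount, inputCount + outputCount), and discovered hidden nodes are appended from inputCount + outputCount onwards. A small sketch of that mapping (the helper names are assumptions, not part of the original source):

// Sketch only: gene IDs implied by the index arithmetic in generateSubstrate.
static uint OutputGeneId(int outputIndex, uint inputCount)
    => (uint)outputIndex + inputCount;

static uint HiddenGeneId(int hiddenIndex, uint inputCount, uint outputCount)
    => (uint)hiddenIndex + inputCount + outputCount;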
Example #16
		/// <summary>
		/// Create a default minimal genome that describes a NN with the given number of inputs and outputs.
		/// </summary>
		/// <returns></returns>
		public static IGenome CreateGenome(NeatParameters neatParameters, IdGenerator idGenerator, int inputNeuronCount, int outputNeuronCount, int outputsPerPolicy, float connectionProportion)
		{
            IActivationFunction actFunct;
			NeuronGene neuronGene; // temp variable.
			NeuronGeneList inputNeuronGeneList = new NeuronGeneList(); // includes bias neuron.
			NeuronGeneList outputNeuronGeneList = new NeuronGeneList();
			NeuronGeneList neuronGeneList = new NeuronGeneList();
			ConnectionGeneList connectionGeneList = new ConnectionGeneList();

			// IMPORTANT NOTE: The neurons must all be created prior to any connections. That way all of the genomes
			// will obtain the same innovation IDs for the bias, input and output nodes in the initial population.
			// Create a single bias neuron.
            //TODO: DAVID proper activation function change to NULL?
            actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
            //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Bias, actFunct);
            neuronGene = new NeuronGene(null, idGenerator.NextInnovationId, NeuronGene.INPUT_LAYER, NeuronType.Bias, actFunct);
			inputNeuronGeneList.Add(neuronGene);
			neuronGeneList.Add(neuronGene);

			// Create input neuron genes.
            actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
			for(int i=0; i<inputNeuronCount; i++)
			{
                //TODO: DAVID proper activation function change to NULL?
                //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Input, actFunct);
                neuronGene = new NeuronGene(null, idGenerator.NextInnovationId, NeuronGene.INPUT_LAYER, NeuronType.Input, actFunct);
				inputNeuronGeneList.Add(neuronGene);
				neuronGeneList.Add(neuronGene);
			}

			// Create output neuron genes. 
            //actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
			for(int i=0; i<outputNeuronCount; i++)
			{
                actFunct = ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid");
                //actFunct = ActivationFunctionFactory.GetRandomActivationFunction(neatParameters);
                //TODO: DAVID proper activation function
                //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Output, actFunct);
                neuronGene = new NeuronGene(null, idGenerator.NextInnovationId, NeuronGene.OUTPUT_LAYER, NeuronType.Output, actFunct);
				outputNeuronGeneList.Add(neuronGene);
				neuronGeneList.Add(neuronGene);
			}

			// Loop over all possible connections from input to output nodes and create a number of connections based upon
			// connectionProportion.
			foreach(NeuronGene targetNeuronGene in outputNeuronGeneList)
			{
				foreach(NeuronGene sourceNeuronGene in inputNeuronGeneList)
				{
					// Always generate an ID even if we aren't going to use it. This is necessary to ensure connections
					// between the same neurons always have the same ID throughout the generated population.
					uint connectionInnovationId = idGenerator.NextInnovationId;

					if(Utilities.NextDouble() < connectionProportion)
					{	// Ok lets create a connection.
						connectionGeneList.Add(	new ConnectionGene(connectionInnovationId, 
							sourceNeuronGene.InnovationId,
							targetNeuronGene.InnovationId,
							(Utilities.NextDouble() * neatParameters.connectionWeightRange ) - neatParameters.connectionWeightRange/2.0));  // Weight 0 +-5
					}
				}
			}

            // Don't create any hidden nodes at this point. Fundamental to the NEAT way is to start minimally!
            // Schrum: Added outputsPerPolicy: If outputsPerPolicy == outputNeuronCount, then behaves like default NEAT
            return new NeatGenome(idGenerator.NextGenomeId, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount, outputsPerPolicy);
		}
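The initial weight expression above draws uniformly from [-connectionWeightRange/2, +connectionWeightRange/2), which is what the "Weight 0 +-5" comment alludes to when the range is 10. A minimal sketch of the same draw (System.Random stands in for Utilities.NextDouble, and the range value is assumed):

// Sketch only: the initial-connection-weight draw used in CreateGenome.
var rng = new Random();
double connectionWeightRange = 10.0;   // assumed; "+-5" above implies a range of 10
double weight = (rng.NextDouble() * connectionWeightRange) - connectionWeightRange / 2.0;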
Example #17
        public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            ConnectionGeneList connections=new ConnectionGeneList();
            if (hiddenCount > 0)
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + hiddenDelta / 2.0f;
                    for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[2] += hiddenDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, hidden + inputCount + outputCount, weight));
                        }
                    }
                }
                coordinates[0] = -1 + hiddenDelta / 2.0f;
                coordinates[1] = 0;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[0] += hiddenDelta)
                {
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, hidden + inputCount + outputCount, outputs + inputCount, weight));
                        }
                    }
                }
            }
            else
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, outputs + inputCount, weight));
                        }
                    }
                }
            }
            return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
        }
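        // Every generator in this listing maps a CPPN output to a connection weight with the same
        // thresholding formula. A minimal sketch of that mapping is included here for reference;
        // the name MapCppnOutputToWeight is illustrative and not part of SharpNEAT itself.
        private static float MapCppnOutputToWeight(float output, double threshold, double weightRange)
        {
            // Outputs whose magnitude is at or below the threshold produce no connection (weight 0);
            // the remaining magnitude is rescaled into [0, weightRange] and the sign of the CPPN
            // output is restored.
            if (Math.Abs(output) <= threshold)
            {
                return 0f;
            }
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }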
        /// <summary>
        ///     Creates a single randomly initialised genome.
        ///     A random set of connections is made from the input to the output neurons; the number of
        ///     connections made is based on NeatGenomeParameters.InitialInterconnectionsProportion,
        ///     which specifies the proportion of all possible input-output connections to be made in
        ///     initial genomes.
        ///     The connections that are made are allocated innovation IDs in a consistent manner across
        ///     the initial population of genomes. To do this we allocate IDs sequentially to all possible
        ///     interconnections and then randomly select some proportion of connections for inclusion in the
        ///     genome. In addition, for this scheme to work the innovation ID generator must be reset to zero
        ///     prior to each call to CreateGenome(), and a test is made to ensure this is the case.
        ///     The consistent allocation of innovation IDs ensures that equivalent connections in different
        ///     genomes have the same innovation ID. Although this isn't strictly necessary, it is
        ///     required for sexual reproduction to work effectively, because like structures are detected by
        ///     comparing innovation IDs only.
        /// </summary>
        /// <param name="birthGeneration">
        ///     The current evolution algorithm generation.
        ///     Assigned to the new genome as its birth generation.
        /// </param>
        public override NeatGenome CreateGenome(uint birthGeneration)
        {
            NeuronGeneList neuronGeneList = new NeuronGeneList(_inputNeuronCount + _outputNeuronCount + _hiddenNeuronCount);
            NeuronGeneList inputNeuronGeneList = new NeuronGeneList(_inputNeuronCount); // includes single bias neuron.
            NeuronGeneList outputNeuronGeneList = new NeuronGeneList(_outputNeuronCount);
            NeuronGeneList hiddenNeuronGeneList = new NeuronGeneList(_hiddenNeuronCount);

            // Create a single bias neuron.
            uint biasNeuronId = _innovationIdGenerator.NextId;
            if (0 != biasNeuronId)
            {
                // The ID generator must be reset before calling this method so that all generated genomes use the
                // same innovation ID for matching neurons and structures.
                throw new SharpNeatException("IdGenerator must be reset before calling CreateGenome(uint)");
            }

            // Note. Genes within nGeneList must always be arranged according to the following layout plan.
            //   Bias - single neuron. Innovation ID = 0
            //   Input neurons.
            //   Output neurons.
            //   Hidden neurons.
            NeuronGene neuronGene = CreateNeuronGene(biasNeuronId, NodeType.Bias);
            inputNeuronGeneList.Add(neuronGene);
            neuronGeneList.Add(neuronGene);

            // Create input neuron genes.
            for (int i = 0; i < _inputNeuronCount; i++)
            {
                neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Input);
                inputNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            // Create output neuron genes.
            for (int i = 0; i < _outputNeuronCount; i++)
            {
                neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Output);
                outputNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            // Create hidden neuron genes.
            for (int i = 0; i < _hiddenNeuronCount; i++)
            {
                neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Hidden);
                hiddenNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            // Define all possible connections between the input and hidden neurons and the hidden and output neurons
            // (fully interconnected with minimal hidden/feature layer).
            int srcCount = inputNeuronGeneList.Count;
            int tgtCount = outputNeuronGeneList.Count;
            int hdnCount = hiddenNeuronGeneList.Count;
            ConnectionDefinition[] srcHdnConnectionDefArr = new ConnectionDefinition[srcCount * hdnCount];
            ConnectionDefinition[] hdnTgtConnectionDefArr = new ConnectionDefinition[tgtCount * hdnCount];

            for (int hdnIdx = 0, i = 0; hdnIdx < hdnCount; hdnIdx++)
            {
                for (int srcIdx = 0; srcIdx < srcCount; srcIdx++)
                {
                    srcHdnConnectionDefArr[i++] = new ConnectionDefinition(_innovationIdGenerator.NextId, srcIdx, hdnIdx);
                }
            }

            for (int hdnIdx = 0, i = 0; hdnIdx < hdnCount; hdnIdx++)
            {
                for (int tgtIdx = 0; tgtIdx < tgtCount; tgtIdx++)
                {
                    hdnTgtConnectionDefArr[i++] = new ConnectionDefinition(_innovationIdGenerator.NextId, hdnIdx, tgtIdx);
                }
            }

            // Shuffle the array of possible connections.
            Utilities.Shuffle(srcHdnConnectionDefArr, _rng);
            Utilities.Shuffle(hdnTgtConnectionDefArr, _rng);

            // Select connection definitions from the head of the list and convert them to real connections.
            // We want some proportion of all possible connections but at least one (Connectionless genomes are not allowed).
            int srcConnectionCount = (int) Utilities.ProbabilisticRound(
                srcHdnConnectionDefArr.Length*_neatGenomeParamsComplexifying.InitialInterconnectionsProportion,
                _rng);
            srcConnectionCount = Math.Max(1, srcConnectionCount);

            int tgtConnectionCount = (int)Utilities.ProbabilisticRound(
                hdnTgtConnectionDefArr.Length * _neatGenomeParamsComplexifying.InitialInterconnectionsProportion,
                _rng);
            tgtConnectionCount = Math.Max(1, tgtConnectionCount);

            // Create the connection gene list and populate it.
            ConnectionGeneList connectionGeneList = new ConnectionGeneList(srcConnectionCount + tgtConnectionCount);

            for (int i = 0; i < srcConnectionCount; i++)
            {
                ConnectionDefinition def = srcHdnConnectionDefArr[i];
                NeuronGene srcNeuronGene = inputNeuronGeneList[def._sourceNeuronIdx];
                NeuronGene tgtNeuronGene = hiddenNeuronGeneList[def._targetNeuronIdx];

                ConnectionGene cGene = new ConnectionGene(def._innovationId,
                    srcNeuronGene.InnovationId,
                    tgtNeuronGene.InnovationId,
                    GenerateRandomConnectionWeight());
                connectionGeneList.Add(cGene);

                // Register connection with endpoint neurons.
                srcNeuronGene.TargetNeurons.Add(cGene.TargetNodeId);
                tgtNeuronGene.SourceNeurons.Add(cGene.SourceNodeId);
            }

            for (int i = 0; i < tgtConnectionCount; i++)
            {
                ConnectionDefinition def = hdnTgtConnectionDefArr[i];
                NeuronGene srcNeuronGene = hiddenNeuronGeneList[def._sourceNeuronIdx];
                NeuronGene tgtNeuronGene = outputNeuronGeneList[def._targetNeuronIdx];

                ConnectionGene cGene = new ConnectionGene(def._innovationId,
                    srcNeuronGene.InnovationId,
                    tgtNeuronGene.InnovationId,
                    GenerateRandomConnectionWeight());
                connectionGeneList.Add(cGene);

                // Register connection with endpoint neurons.
                srcNeuronGene.TargetNeurons.Add(cGene.TargetNodeId);
                tgtNeuronGene.SourceNeurons.Add(cGene.SourceNodeId);
            }

            // Ensure connections are sorted.
            connectionGeneList.SortByInnovationId();

            // Create and return the completed genome object.
            return CreateGenome(_genomeIdGenerator.NextId, birthGeneration,
                neuronGeneList, connectionGeneList,
                _inputNeuronCount, _outputNeuronCount, false);
        }
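        // A hedged usage sketch for CreateGenome above, illustrating the contract spelled out in the
        // XML comments and enforced by the biasNeuronId check: the innovation ID generator must be
        // reset to zero before each call so that equivalent structures receive matching IDs across
        // the initial population. The factory type name, the InnovationIdGenerator property and its
        // Reset() method are assumptions made for illustration only.
        public static List<NeatGenome> CreateSeedPopulation(NeatGenomeFactory factory, int populationSize)
        {
            List<NeatGenome> genomeList = new List<NeatGenome>(populationSize);
            for (int i = 0; i < populationSize; i++)
            {
                factory.InnovationIdGenerator.Reset();   // assumed reset hook; see the zero check on biasNeuronId above
                genomeList.Add(factory.CreateGenome(0u)); // birth generation 0 for an initial population
            }
            return genomeList;
        }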
Example No. 19
        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }

            //return new NeatGenome(id, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
            NeatGenome g = new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);

            g.Behavior            = ReadBehavior(xmlGenome.SelectSingleNode("behavior"));
            g.Behavior.objectives = new double[6];
            g.objectives          = new double[6];


            // JUSTIN: Read grid/trajectory info
            g.GridCoords          = ReadGrid(xmlGenome.SelectSingleNode("grid"));
            g.Behavior.trajectory = ReadTrajectory(xmlGenome.SelectSingleNode("trajectory"));

            return(g);
        }
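        // A minimal usage sketch for the Read(XmlElement) method above. The file path is
        // illustrative, and doc.DocumentElement is used so no assumption is made about the root
        // element name of the saved genome file.
        public static NeatGenome LoadGenomeFromFile(string path)
        {
            XmlDocument doc = new XmlDocument();
            doc.Load(path);
            // Read() counts input/output neurons and restores module, connection, behavior,
            // grid and trajectory data as shown above.
            return Read(doc.DocumentElement);
        }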
Example No. 20
        /// <summary>
        /// Reads a NeatGenome from XML.
        /// </summary>
        /// <param name="xr">The XmlReader to read from.</param>
        /// <param name="nodeFnIds">Indicates if node activation function IDs should be read. They are required
        /// for HyperNEAT genomes but not for NEAT genomes.</param>
        public static NeatGenome ReadGenome(XmlReader xr, bool nodeFnIds)
        {
            // Find <Network>.
            XmlIoUtils.MoveToElement(xr, false, __ElemNetwork);
            int initialDepth = xr.Depth;

            // Read genome ID attribute if present. Otherwise default to zero; it's the caller's responsibility to
            // check IDs are unique and in-line with the genome factory's ID generators.
            string genomeIdStr = xr.GetAttribute(__AttrId);
            uint genomeId;
            uint.TryParse(genomeIdStr, out genomeId);

            // Read birthGeneration attribute if present. Otherwise default to zero.
            string birthGenStr = xr.GetAttribute(__AttrBirthGeneration);
            uint birthGen;
            uint.TryParse(birthGenStr, out birthGen);

            // Find <Nodes>.
            XmlIoUtils.MoveToElement(xr, true, __ElemNodes);

            // Create a reader over the <Nodes> sub-tree.
            int inputNodeCount = 0;
            int outputNodeCount = 0;
            NeuronGeneList nGeneList = new NeuronGeneList();
            using(XmlReader xrSubtree = xr.ReadSubtree())
            {
                // Re-scan for the root <Nodes> element.
                XmlIoUtils.MoveToElement(xrSubtree, false);

                // Move to first node elem.
                XmlIoUtils.MoveToElement(xrSubtree, true, __ElemNode);

                // Read node elements.
                do
                {
                    NodeType neuronType = NetworkXmlIO.ReadAttributeAsNodeType(xrSubtree, __AttrType);
                    uint id = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrId);
                    int functionId = 0;
                    double[] auxState = null;
                    if(nodeFnIds)
                    {	// Read activation fn ID.
                        functionId = XmlIoUtils.ReadAttributeAsInt(xrSubtree, __AttrActivationFunctionId);

                        // Read aux state as a comma-separated list of real values.
                        auxState = XmlIoUtils.ReadAttributeAsDoubleArray(xrSubtree, __AttrAuxState);
                    }

                    NeuronGene nGene = new NeuronGene(id, neuronType, functionId, auxState);
                    nGeneList.Add(nGene);

                    // Track the number of input and output nodes.
                    switch(neuronType)
                    {
                        case NodeType.Input:
                            inputNodeCount++;
                            break;
                        case NodeType.Output:
                            outputNodeCount++;
                            break;
                    }
                }
                while(xrSubtree.ReadToNextSibling(__ElemNode));
            }

            // Find <Connections>.
            XmlIoUtils.MoveToElement(xr, false, __ElemConnections);

            // Create a reader over the <Connections> sub-tree.
            ConnectionGeneList cGeneList = new ConnectionGeneList();
            using(XmlReader xrSubtree = xr.ReadSubtree())
            {
                // Re-scan for the root <Connections> element.
                XmlIoUtils.MoveToElement(xrSubtree, false);

                // Move to first connection elem.
                string localName = XmlIoUtils.MoveToElement(xrSubtree, true);
                if(localName == __ElemConnection)
                {   // We have at least one connection.
                    // Read connection elements.
                    do
                    {
                        uint id = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrId);
                        uint srcId = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrSourceId);
                        uint tgtId = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrTargetId);
                        double weight = XmlIoUtils.ReadAttributeAsDouble(xrSubtree, __AttrWeight);
                        ConnectionGene cGene = new ConnectionGene(id, srcId, tgtId, weight);
                        cGeneList.Add(cGene);
                    }
                    while(xrSubtree.ReadToNextSibling(__ElemConnection));
                }
            }

            // Move the reader beyond the closing tags </Connections> and </Network>.
            do
            {
                if (xr.Depth <= initialDepth) {
                    break;
                }
            }
            while(xr.Read());

            // Construct and return loaded NeatGenome.
            return new NeatGenome(null, genomeId, birthGen, nGeneList, cGeneList, inputNodeCount, outputNodeCount, true, true);
        }
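        // A hedged usage sketch for ReadGenome above. Pass true for nodeFnIds when the file holds a
        // CPPN/HyperNEAT genome with per-node activation function IDs, false for a plain NEAT
        // genome. The file path and helper name are illustrative only.
        public static NeatGenome LoadGenome(string path, bool nodeFnIds)
        {
            using (XmlReader xr = XmlReader.Create(path))
            {
                return ReadGenome(xr, nodeFnIds);
            }
        }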
Example No. 21
        public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            int    maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            double epsilon       = 0.0;

            uint firstBias   = 0;
            uint lastBias    = biasCount;
            uint firstInput  = biasCount;
            uint lastInput   = biasCount + inputCount;
            uint firstOutput = biasCount + inputCount;
            uint lastOutput  = biasCount + inputCount + outputCount;
            uint firstHidden = biasCount + inputCount + outputCount;
            uint lastHidden  = biasCount + inputCount + outputCount + hiddenCount;

            float[]            coordinates = new float[4];
            float              output;
            uint               connectionCounter = 0;
            ConnectionGeneList connections       = new ConnectionGeneList();

            // give bias inputs to all hidden and output nodes.
            // the source of the link is located at (0,0), the target is each node, and the weight of the link is the second output of the CPPN.
            coordinates[0] = 0;
            coordinates[1] = 0;
            for (uint bias = firstBias; bias < lastBias; bias++)
            {
                // link the bias to all hidden nodes.
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint hidden = firstHidden; hidden < lastHidden; hidden++)
                {
                    coordinates[2] += hiddenDelta;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, bias, hidden, weight));
                    }
                }

                // link the bias to all output nodes.
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint outp = firstOutput; outp < lastOutput; outp++)
                {
                    coordinates[2] += outputDelta;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, bias, outp, weight));
                    }
                }
            }

            if (hiddenCount > 0)
            {
                // link all input nodes to all hidden nodes.
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint input = firstInput; input < lastInput; input++)
                {
                    coordinates[0] += inputDelta;
                    coordinates[2]  = -1 + hiddenDelta / 2.0f;
                    for (uint hidden = firstHidden; hidden < lastHidden; hidden++)
                    {
                        coordinates[2] += hiddenDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, hidden, weight));
                        }
                    }
                }

                // link all hidden nodes to all output nodes.
                coordinates[0] = -1 + hiddenDelta / 2.0f;
                coordinates[1] = 0;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint hidden = firstHidden; hidden < lastHidden; hidden++)
                {
                    coordinates[0] += hiddenDelta;
                    coordinates[2]  = -1 + outputDelta / 2.0f;
                    for (uint outp = firstOutput; outp < lastOutput; outp++)
                    {
                        coordinates[2] += outputDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, hidden, outp, weight));
                        }
                    }
                }
            }
            else // there are no hidden nodes, so link all input nodes directly to all output nodes.
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint input = firstInput; input < lastInput; input++)
                {
                    coordinates[0] += inputDelta;
                    coordinates[2]  = -1 + outputDelta / 2.0f;
                    for (uint outp = firstOutput; outp < lastOutput; outp++)
                    {
                        coordinates[2] += outputDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, outp, weight));
                        }
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
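        // The substrate generators in this listing place each layer of nodes on a horizontal line
        // (inputs at y = -1, hidden at y = 0, outputs at y = 1) with x spread evenly over [-1, 1] by
        // centring each node in a slot of width 2/count. A minimal sketch of that x-coordinate
        // scheme; the name SubstrateX is illustrative only.
        private static float SubstrateX(uint index, uint count)
        {
            float delta = 2.0f / count;                // slot width for this layer
            return -1f + delta / 2.0f + index * delta; // centre of slot 'index'
        }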
Example No. 22
        public static IGenome CreateGenomePreserveID(NeatParameters neatParameters, IdGenerator idGenerator, int inputNeuronCount, int outputNeuronCount, float connectionProportion)
        {
            IActivationFunction actFunct;
            NeuronGene neuronGene; // temp variable.
            NeuronGeneList inputNeuronGeneList = new NeuronGeneList(); // includes bias neuron.
            NeuronGeneList outputNeuronGeneList = new NeuronGeneList();
            NeuronGeneList neuronGeneList = new NeuronGeneList();
            ConnectionGeneList connectionGeneList = new ConnectionGeneList();

            int nodeCount = 0;

            WINManager win = WINManager.SharedWIN;

            // IMPORTANT NOTE: The neurons must all be created prior to any connections. That way all of the genomes
            // will obtain the same innovation ID's for the bias,input and output nodes in the initial population.
            // Create a single bias neuron.
            //TODO: DAVID proper activation function change to NULL?
            actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
            //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Bias, actFunct);
            WINNode neuronNode = win.findOrInsertNodeWithProperties(idGenerator,
                WINNode.NodeWithProperties(nodeCount++, NeuronType.Bias)
                );

            neuronGene = new NeuronGene(null, neuronNode.UniqueID, NeuronGene.INPUT_LAYER, NeuronType.Bias, actFunct);
            inputNeuronGeneList.Add(neuronGene);
            neuronGeneList.Add(neuronGene);

            // Create input neuron genes.
            actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
            for (int i = 0; i < inputNeuronCount; i++)
            {
                //TODO: DAVID proper activation function change to NULL?
                //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Input, actFunct);
                neuronNode = win.findOrInsertNodeWithProperties(idGenerator, WINNode.NodeWithProperties(nodeCount++, NeuronType.Input));

                neuronGene = new NeuronGene(null, neuronNode.UniqueID, NeuronGene.INPUT_LAYER, NeuronType.Input, actFunct);
                inputNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            // Create output neuron genes.
            //actFunct = ActivationFunctionFactory.GetActivationFunction("NullFn");
            for (int i = 0; i < outputNeuronCount; i++)
            {
                actFunct = ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid");
                //actFunct = ActivationFunctionFactory.GetRandomActivationFunction(neatParameters);
                //TODO: DAVID proper activation function
                //neuronGene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Output, actFunct);
                neuronNode = win.findOrInsertNodeWithProperties(idGenerator, WINNode.NodeWithProperties(nodeCount++, NeuronType.Output));

                neuronGene = new NeuronGene(null, neuronNode.UniqueID, NeuronGene.OUTPUT_LAYER, NeuronType.Output, actFunct);
                outputNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            int currentConnCount = 0;
            WINConnection winConn;
            // Loop over all possible connections from input to output nodes and create a number of connections based upon
            // connectionProportion.
            foreach (NeuronGene targetNeuronGene in outputNeuronGeneList)
            {
                foreach (NeuronGene sourceNeuronGene in inputNeuronGeneList)
                {
                    // Always generate an ID even if we aren't going to use it. This is necessary to ensure connections
                    // between the same neurons always have the same ID throughout the generated population.
                    //PAUL NOTE:
                    //instead of generating and not using and id, we use the target and connection properties to uniquely identify a connection in WIN
                    //uint connectionInnovationId = idGenerator.NextInnovationId;

                    if (Utilities.NextDouble() < connectionProportion)
                    {
                        // Ok lets create a connection.
                        //first we search or create the winconnection object
                        winConn = win.findOrInsertConnectionWithProperties(idGenerator,
                            WINConnection.ConnectionWithProperties(currentConnCount, sourceNeuronGene.InnovationId, targetNeuronGene.InnovationId));

                        //our winconn will have our innovationID, and our weight like normal
                        //this will also respect the idgenerator, since it gets sent in as well, for legacy purposes
                        connectionGeneList.Add(new ConnectionGene(winConn.UniqueID,
                            sourceNeuronGene.InnovationId,
                            targetNeuronGene.InnovationId,
                            (Utilities.NextDouble() * neatParameters.connectionWeightRange) - neatParameters.connectionWeightRange / 2.0)
                            );
                            //(Utilities.NextDouble() * neatParameters.connectionWeightRange) - neatParameters.connectionWeightRange / 2.0));  // Weight 0 +-5
                    }

                    currentConnCount++;

                }
            }
            //WIN will eventually be in control of all the genomes that are created as well, but not quite yet!
            //TODO: WIN should be generating genomeIDs explicitly

            // Don't create any hidden nodes at this point. Fundamental to the NEAT way is to start minimally!
            return new NeatGenome(idGenerator.NextGenomeId, neuronGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);
        }
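        // A hedged usage sketch for CreateGenomePreserveID above, seeding an initial population.
        // The parameterless IdGenerator constructor, the input/output counts and the 0.5f
        // connection proportion are illustrative assumptions.
        public static List<IGenome> SeedPopulationPreserveID(NeatParameters neatParameters, int populationSize)
        {
            IdGenerator idGenerator = new IdGenerator();
            List<IGenome> population = new List<IGenome>(populationSize);
            for (int i = 0; i < populationSize; i++)
            {
                // 3 inputs, 2 outputs, roughly half of all possible input-output connections per genome.
                population.Add(CreateGenomePreserveID(neatParameters, idGenerator, 3, 2, 0.5f));
            }
            return population;
        }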
Example No. 23
        public virtual NeatGenome GenerateGenome(SharpNeatLib.NeuralNetwork.INetwork cppnNetwork)
        {
            ConnectionGeneList connections = new ConnectionGeneList();

            int maxIterations = 2 * (cppnNetwork.TotalNeuronCount - (cppnNetwork.InputNeuronCount + cppnNetwork.OutputNeuronCount)) + 1;

            // TODO:
            maxIterations = Math.Min(maxIterations, 4);

            // TODO:
            double epsilon     = 0.0;
            double threshold   = HyperNEATParameters.threshold;
            double weightRange = HyperNEATParameters.weightRange;


            uint biasid           = 0u;
            uint inputsStart      = biasid + (m_useBias ? 1u : 0u);
            uint inputsEnd        = (uint)(inputsStart + (m_rows * m_cols));
            uint outputsStart     = inputsEnd;
            uint outputsEnd       = (uint)(outputsStart + (m_rows * m_cols));
            uint firstHiddenStart = outputsEnd;

            float[] coordinates = new float[4];
            float   output;
            uint    connectionCounter = 0u;

            if (m_useBias)
            {
                // we place the bias neuron at the center and use the
                // 2nd output of the CPPN to compute its weight
                coordinates[0] = 0.0f;
                coordinates[1] = 0.0f;

                // add the bias link to all neurons of the next layer
                for (int ni = 0, ncount = m_rows * m_cols; ni < ncount; ni++)
                {
                    int row = (ni / m_cols);
                    int col = (ni % m_cols);

                    coordinates[2] = (-1.0f) + (m_colDelta * (col + 0.5f));
                    coordinates[3] = (-1.0f) + (m_rowDelta * (row + 0.5f));

                    cppnNetwork.ClearSignals();
                    cppnNetwork.SetInputSignals(coordinates);
                    cppnNetwork.RelaxNetwork(maxIterations, epsilon);

                    for (int li = 0; li < m_numLayers - 1; li++)
                    {
                        output = cppnNetwork.GetOutputSignal(li * 2 + 1);
                        uint lstart = (uint)(firstHiddenStart + li * (m_cols * m_rows));
                        if (li == m_numLayers - 2)
                        {
                            lstart = outputsStart;
                        }

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++,
                                                               biasid, (uint)(lstart + ni), weight));
                        }
                    }
                }
            }

            // now add the connections
            for (int inpi = 0, inpcount = m_rows * m_cols; inpi < inpcount; inpi++)
            {
                for (int outi = 0, outcount = m_rows * m_cols; outi < outcount; outi++)
                {
                    int inrow = (inpi / m_cols); int incol = (inpi % m_cols);
                    int outrow = (outi / m_cols); int outcol = (outi % m_cols);

                    coordinates[0] = (-1.0f) + (m_colDelta * (incol + 0.5f));
                    coordinates[1] = (-1.0f) + (m_rowDelta * (inrow + 0.5f));
                    coordinates[2] = (-1.0f) + (m_colDelta * (outcol + 0.5f));
                    coordinates[3] = (-1.0f) + (m_rowDelta * (outrow + 0.5f));

                    cppnNetwork.ClearSignals();
                    cppnNetwork.SetInputSignals(coordinates);
                    cppnNetwork.RelaxNetwork(maxIterations, epsilon);

                    for (int li = 0; li < m_numLayers - 1; li++)
                    {
                        output = cppnNetwork.GetOutputSignal(m_useBias ? li * 2 : li);

                        uint dststart = (uint)(firstHiddenStart + li * (m_cols * m_rows));
                        if (li == m_numLayers - 2)
                        {
                            dststart = outputsStart;
                        }

                        uint srcstart = (uint)(firstHiddenStart + (li - 1) * (m_cols * m_rows));
                        if (li == 0)
                        {
                            srcstart = inputsStart;
                        }

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++,
                                                               (uint)(srcstart + inpi), (uint)(dststart + outi), weight));
                        }
                    }
                }
            }


            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_rows * m_cols, m_rows * m_cols));
        }
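        // The grid-based generators above and below convert a flat cell index into substrate
        // coordinates by centring the cell within [-1, 1] on each axis. A minimal sketch of that
        // mapping; the name GridToSubstrate is illustrative only.
        private static void GridToSubstrate(int cellIndex, int cols, float colDelta, float rowDelta,
                                            out float x, out float y)
        {
            int row = cellIndex / cols;
            int col = cellIndex % cols;
            x = -1.0f + colDelta * (col + 0.5f); // centre of the column cell
            y = -1.0f + rowDelta * (row + 0.5f); // centre of the row cell
        }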
        public override SharpNeatLib.NeatGenome.NeatGenome GenerateGenome(INetwork network)
        {
            int maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // TODO:
            maxIterations = Math.Min(maxIterations, 4);

            double epsilon     = 0.0;
            double threshold   = HyperNEATParameters.threshold;
            double weightRange = HyperNEATParameters.weightRange;

            // store constant ids for later references

            uint biasid       = 0u;
            uint inputsStart  = m_useBias ? 1u : 0u;
            uint inputsEnd    = (uint)(inputsStart + (m_rows * m_cols));
            uint outputsStart = inputsEnd;
            uint outputsEnd   = (uint)(outputsStart + (m_rows * m_cols));

            float[] coordinates = new float[8];

            coordinates[2] = coordinates[6] = m_homex;
            coordinates[3] = coordinates[7] = m_homey;

            float output;
            uint  connectionCounter        = 0;
            ConnectionGeneList connections = new ConnectionGeneList();

            if (m_useBias)
            {
                // we place the bias neuron at the center and use the
                // 2nd output of the CPPN to compute its weight
                coordinates[0] = 0.0f;
                coordinates[1] = 0.0f;

                // add the bias link to all output neurons
                for (int ni = 0, ncount = m_rows * m_cols; ni < ncount; ni++)
                {
                    int row = (ni / m_cols);
                    int col = (ni % m_cols);

                    coordinates[4] = (-1.0f) + (m_colDelta * (col + 0.5f));
                    coordinates[5] = (-1.0f) + (m_rowDelta * (row + 0.5f));

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex + 1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, biasid, (uint)(outputsStart + ni), weight));
                    }
                }
            }

            // now add possible connections between all input output neuron pairs

            for (int inpi = 0, inpcount = m_rows * m_cols; inpi < inpcount; inpi++)
            {
                for (int outi = 0, outcount = m_rows * m_cols; outi < outcount; outi++)
                {
                    int inrow = (inpi / m_cols); int incol = (inpi % m_cols);
                    int outrow = (outi / m_cols); int outcol = (outi % m_cols);

                    coordinates[0] = (-1.0f) + (m_colDelta * (incol + 0.5f));
                    coordinates[1] = (-1.0f) + (m_rowDelta * (inrow + 0.5f));
                    coordinates[4] = (-1.0f) + (m_colDelta * (outcol + 0.5f));
                    coordinates[5] = (-1.0f) + (m_rowDelta * (outrow + 0.5f));

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++,
                                                           (uint)(inputsStart + inpi), (uint)(outputsStart + outi), weight));
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_rows * m_cols, m_rows * m_cols));
        }
Example No. 25
        public NeatGenome.NeatGenome generateGenomeStackSituationalPolicy(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, float signal)
        {
            // Schrum: For debugging
            //Console.WriteLine("generateGenomeStackSituationalPolicy:signal=" + signal);
            //Console.WriteLine("CPPN inputs = " + network.InputNeuronCount);

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            // Schrum: Too many inputs: Only store those that are needed
            //float[] coordinates = new float[5 + 1]; // <-- Schrum: bit sloppy: frequently results in unused CPPN inputs. Should make more precise
            float[] coordinates = new float[network.InputNeuronCount]; // Schrum: CPPN tracks how many inputs it needs
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            // Schrum: If we are inside this function, then we either have a heterogeneous team
            //         or a single agent (not sure why that ended up being the case; odd use of homogeneousTeam).
            //         Therefore, numberOfAgents tells us whether we need to save space for a Z-coordinate,
            //         and whether we are expecting a Situation input.
            if (numberOfAgents == 1 && coordinates.Length > 4)
                coordinates[4] = signal; // No Z coord, but save situation
            else if (coordinates.Length > 5)
                coordinates[5] = signal; // Both Z coord and situation

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                // Schrum: Only include Z-coord as input if there are multiple team members
                if (numberOfAgents > 1)
                    coordinates[4] = stackCoordinate; // Schrum: z-coord will always be at index 4

                // Schrum: Debug
                //Console.WriteLine("CPPN inputs (first 4 blank): " + string.Join(",", coordinates));


                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;


                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                       // Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                    // Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; // Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }
                                                                
                                //--- bias
                                //-----------------Get the bias of the target node
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID] = true;
                                }
                                //--bias



                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                // Schrum: Debug
                                //Console.WriteLine("CPPN inputs: " + string.Join(",", coordinates));

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                // Schrum: Observation: It seems impossible to use both LEO and adaptive networks because of these hardcoded magic numbers
                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                // Schrum: Observation: In long run, might be desirable to use LEO, but incompatible with special preference neuron output
                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                // Schrum: This is a horrible hack, but it gets the job done for now. 
                                // The reason this works is that it makes the following assumptions that could easily be broken in the future:
                                // 1) It is assumed that the only reason a CPPN would have 3 outputs per policy is if the third is for preference links
                                // 2) It is assumed that in a substrate with a preference neuron, the y-coord will always be 0.8, and no other neuron will have
                                //    that y-coord.
                                //Console.WriteLine("output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                //Console.WriteLine("network.OutputsPerPolicy == 3" + (network.OutputsPerPolicy == 3));
                                //Console.WriteLine("target.Y == 0.8" + (target.Y == 0.8f));
                                if (network.OutputsPerPolicy == 3 && target.Y == 0.8f)
                                {
                                    // The output from the link for the preference neuron replaces the standard output.
                                    // Because the link weight is defined by a totally different CPPN output, the preference
                                    // neuron is more free to behave very differently.
                                    output = network.GetOutputSignal(2);
                                    //Console.WriteLine("Preference output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    // If adaptive network, set weight to a small value.
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: Debugging
            // Looking at the control networks has revealed that the order of details in the substrate
            // description is important. The layer with the preference neuron has to be defined last
            // if it is to be the final neuron in the linearly organized output layer.
            //XmlDocument doc = new XmlDocument();
            //SharpNeatLib.NeatGenome.Xml.XmlGenomeWriterStatic.Write(doc, sng);
            //System.IO.FileInfo oFileInfo = new System.IO.FileInfo("temp.xml");
            //doc.Save(oFileInfo.FullName);

            return sng;
        }
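        // A minimal sketch of the neuron ID layout used by the stacked generator above. SharpNEAT
        // expects bias|input|output|hidden ordering; here all agents' inputs come first, then all
        // outputs, then all hidden neurons, with each agent's block contiguous. The method name and
        // its simplified parameters are illustrative only.
        private static uint StackedNeuronId(int groupType, uint agent, uint indexWithinAgent,
                                            uint inputCount, uint outputCount, uint hiddenCount, uint numberOfAgents)
        {
            uint totalInputCount = inputCount * numberOfAgents;
            uint totalOutputCount = outputCount * numberOfAgents;
            switch (groupType)
            {
                case 0:  return (agent * inputCount) + indexWithinAgent;                                       // input group
                case 1:  return totalInputCount + (agent * outputCount) + indexWithinAgent;                    // output group
                default: return totalInputCount + totalOutputCount + (agent * hiddenCount) + indexWithinAgent; // hidden group
            }
        }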
        // NOTE: Multi-Plane Substrates ARE MAYBE supported by this method!
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool  adaptiveNetwork,bool  modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
            float[] coordinates = new float[4]; //JUSTIN: CHANGE THIS BACK TO [4]!!!
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            // CPPN Outputs: [ Weights ] [ Biases ]
            // When using multi-plane substrates, there will be multiple Weight and Bias outputs.
            // There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
            // There is a Bias output for every plane
            // Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
            int numPlanes = planes.Count;
            int numPlaneConnections = planesConnected.Count;
            int computedIndex;

            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {

                        //-----------------Get the bias of the source node
                        /*switch (ng.GroupType)
                        {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                        }
                        coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RecursiveActivation();//network.MultipleSteps(iterations);

                        neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                        //*///----------------------------

                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                                case 0: targetID = connectedNG.GlobalID + targetCout; break;
                                case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
                                case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //-----------------Get the bias of the target node
                            coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                            //coordinates[4] = 0.0f; coordinates[5] = 0.0f; //JUSTIN: REMOVE THIS!!!
                            //String s = arrayToString(coordinates);
                            //if (weights.ContainsKey(s))
                            //    neurons[(int)targetID].Bias = weights[s];
                            //else
                            {
                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);
                                computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                //weights.Add(s,neurons[(int)targetID].Bias);
                            }
                            //----------------------------

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;
                            //coordinates[4] = source.X - target.X; coordinates[5] = source.Y - target.Y; //JUSTIN: REMOVE THIS!!!

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);
                            computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                            //output = network.GetOutputSignal(0);
                            output = network.GetOutputSignal(computedIndex);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return gn;
        }
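The weight expression used throughout these generators follows the usual HyperNEAT rule: CPPN outputs whose magnitude falls below threshold produce no connection, and the remaining magnitude is rescaled into [-weightRange, weightRange] with the original sign. A minimal stand-alone sketch of that mapping (the helper name CppnOutputToWeight is ours, not part of SharpNeatLib):

        // Hedged sketch of the weight-expression rule repeated in the generators in this listing.
        private static float CppnOutputToWeight(float output, double threshold, double weightRange)
        {
            // Magnitudes at or below the threshold express no connection at all.
            if (Math.Abs(output) <= threshold)
                return 0.0f;

            // Rescale the remaining magnitude into (0, weightRange] and restore the sign.
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }

Raising threshold therefore both sparsifies the substrate and stretches the surviving weights back toward the full range.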
Ejemplo n.º 27
0
        public override NeatGenome generateGenome(INetwork network)
        {
            var  coordinates               = new double[6];
            int  iterations                = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            uint connectionCounter         = 0;
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));


            for (int layer = -1; layer < 1; layer++)
            {
                coordinates[0] = layer;
                coordinates[3] = layer + 1;
                uint srcRow = 0;
                for (float row1 = -1; row1 <= 1; row1 += 0.5f, srcRow++)
                {
                    coordinates[1] = row1;
                    uint srcCol = 0;
                    for (float col1 = -1; col1 <= 1; col1 += 0.5f, srcCol++)
                    {
                        coordinates[2] = col1;
                        uint tarRow = 0;
                        for (float row2 = -1; row2 <= 1; row2 += 0.5f, tarRow++)
                        {
                            coordinates[4] = row2;
                            uint tarCol = 0;
                            for (float col2 = -1; col2 <= 1; col2 += 0.5f, tarCol++)
                            {
                                coordinates[5] = col2;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.MultipleSteps(iterations);
                                float output = network.GetOutputSignal(0);
                                network.ClearSignals();

                                if (Math.Abs(output) > threshold)
                                {
                                    uint source = srcRow * 5 + srcCol;
                                    if (layer == 0)
                                    {
                                        source += inputCount + outputCount;
                                    }
                                    uint target = tarRow * 5 + tarCol;
                                    if (layer == -1)
                                    {
                                        target += inputCount + outputCount;
                                    }
                                    else
                                    {
                                        target += inputCount;
                                    }

                                    float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                    connections.Add(new ConnectionGene(connectionCounter++, source, target, weight));
                                }
                            }

                            if (row2 == -0.5f)
                            {
                                row2 += 0.5f;
                            }
                        }
                    }

                    if (row1 == -0.5f)
                    {
                        row1 += 0.5f;
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
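In the example above, the column loop visits five positions per row while the row loop skips the row at 0 (via the row == -0.5f bump), so a node's id within its layer is row-major with five columns, srcRow * 5 + srcCol, plus a layer offset. A small sketch of that offset logic, assuming SharpNEAT's input|output|hidden ordering as used here (the helper is illustrative, not part of the original code):

        // Illustrative helper mirroring the id offsets in the loop above (five columns per row assumed).
        private static uint GridNodeId(uint row, uint col, int layerType, uint inputCount, uint outputCount)
        {
            uint id = row * 5 + col;
            switch (layerType)
            {
                case 0:  return id;                              // input layer (sources when layer == -1 above)
                case 1:  return id + inputCount;                 // output layer (targets when layer == 0 above)
                default: return id + inputCount + outputCount;   // hidden layer
            }
        }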
        // NOTE: Multi-Plane Substrates ARE supported by this method!
        private NeatGenome.NeatGenome generateHiveBrainGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, bool ct)
        {
            //bool relativeCoordinate = false;
            bool oneWay = false;
            bool homogeneous = false;
            Dictionary<String, float> weights = new Dictionary<String, float>();
            float timeConstantMin = 0.1f;
            float timeConstantMax = 2.0f;

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount))); // TODO: Perhaps get an exact count of connections in the constructor and use that value here?
            float[] coordinates = new float[5]; //JUSTIN: Used to be 6 coordinates, zstack was duplicated for relativeCoordinate hyjinx. fixed it. // Inputs to the CPPN: [srcX, srcY, tgX, tgY, zstack]
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            // CPPN Outputs: [ Weights ] [ Biases ]
            // When using multi-plane substrates, there are multiple Weight and Bias outputs:
            // one Weight output for every plane-to-plane connection (including a plane connected
            // to itself, as in regular substrates), and one Bias output for every plane.
            // Since "regular substrates" have only one plane, they have exactly one Weight and one Bias output.
            int numPlanes = planes.Count;
            int numPlaneConnections = planesConnected.Count;
            int computedIndex;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                //coordinates[4] = homogeneous ? 0 : stackCoordinate;//-1 ? -1 : 0;//0;//stackCoordinate;
                //coordinates[5] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        /*if (!relativeCoordinate)
                            coordinates[5] = stackCoordinate;
                        else //USE RELATIVE
                            coordinates[5] = 0;//*/

                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //-----------------Get the bias of the source node
                           /* switch (ng.GroupType)
                            {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                            }
                            coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);

                            neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                            if (ct)
                            {
                                neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
                                System.Diagnostics.Debug.Assert(neurons[(int)sourceID].TimeConstant > 0);
                            }*/
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //-----------------Get the bias of the target node
                                coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                                //String s = arrayToString(coordinates);
                                //if (weights.ContainsKey(s))
                                //    neurons[(int)targetID].Bias = weights[s];
                                //else
                                {
                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    network.RecursiveActivation();//network.MultipleSteps(iterations);
                                    computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                    //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                    //weights.Add(s,neurons[(int)targetID].Bias);
                                }
                                if (ct)
                                {
                                    neurons[(int)targetID].TimeConstant = timeConstantMin + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * (timeConstantMax - timeConstantMin));
                                    System.Diagnostics.Debug.Assert(neurons[(int)targetID].TimeConstant > 0);
                                }
                                //----------------------------

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;
                                //Console.WriteLine(arrayToString(coordinates));
                                
                                //if(weights.ContainsKey(s))
                                //    output = weights[s];
                                //else
                                {
                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    network.RecursiveActivation();//network.MultipleSteps(iterations);
                                    computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                                    //output = network.GetOutputSignal(0);
                                    output = network.GetOutputSignal(computedIndex);
                                    //weights.Add(s, output);
                                }
                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }
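                                // With LEO (Link Expression Output) enabled, the magnitude threshold is disabled
                                // and a separate CPPN output gates whether the link is expressed at all; the
                                // weight output then only sets its strength.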

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }

                    foreach (uint connectedTo in ng.HiveConnectedTo)
                    {
                        bool wrapAround = true;

                        for (uint agentConnect = 0; agentConnect < stackCoordinates.Count; agentConnect++)
                        {
                            //Make sure we're not making a recurrent connection on the same agent
                            //if (agentConnect == agent)
                            //    continue;
                           // else if ((agent == stackCoordinates.Count - 1 && agentConnect == 0) || (agent == 0 && agentConnect == stackCoordinates.Count - 1))
                           //     ;//agentConnect = 0;
                            if (agent != 0 && agent != stackCoordinates.Count - 1)
                                continue;

                            //if (agent == 1)
                            //    continue;

                            //if (agentConnect != 0 )
                            //    continue;

                            //Limits connections to only neighbors.  Good?
                            //if (!((agent == 0 || agentConnect >= agent - 1) && agentConnect <= agent + 1))
                            //    continue;
                            //if (agentConnect > agent + 1 || agentConnect < agent - 1)
                            //    continue;

                            if (oneWay)
                            {
                                //ONE-WAY
                                if (agentConnect > agent + 1 || agentConnect < agent)
                                    continue;
                            }

                            /*if (!relativeCoordinate)
                                //USE THE Z COORDINATE
                                coordinates[5] = stackCoordinates[(int)agentConnect];
                            else
                                //USE THE RELATIVE COORDINATE
                                coordinates[5] = agentConnect > agent ? 1 : -1;
                            //*/
                            //WRAP AROUND
                            /*if (agent == stackCoordinates.Count - 1 && agentConnect == 0)
                                coordinates[5] = 1;
                            else if (agent == 0 && agentConnect == stackCoordinates.Count - 1)
                                coordinates[5] = -1;
                             */

                            connectedNG = getNeuronGroup(connectedTo);

                            sourceCount = 0;
                            foreach (PointF source in ng.NeuronPositions)
                            {

                                //-----------------Get the bias of the source node
                               /* switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }
                                coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);

                                neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                if (ct)
                                {
                                    neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
                                    System.Diagnostics.Debug.Assert(neurons[(int)sourceID].TimeConstant > 0);
                                }*/
                                //----------------------------

                                targetCout = 0;
                                foreach (PointF target in connectedNG.NeuronPositions)
                                {
                                    /*if ((source.X != target.X))
                                    {
                                        targetCout++;
                                        continue;
                                    }*/
                                    if (/*source.X!= target.X ||*/ target.X != coordinates[4])// || source.X!= coordinates[4])
                                    {
                                        targetCout++;
                                        continue;
                                    }
                                   /* if (agent != 0 && agent != stackCoordinates.Count - 1)
                                    { 
                                        if(agentConnect != 0 && agentConnect != stackCoordinates.Count - 1)
                                        {
                                            targetCout++;
                                            continue;
                                        }
                                    }*/
                                    switch (ng.GroupType)
                                    {
                                        case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                        case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                        case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                    }

                                    switch (connectedNG.GroupType)
                                    {
                                        case 0: targetID = (agentConnect * InputCount) + connectedNG.GlobalID + targetCout; break;
                                        case 1: targetID = totalInputCount + (agentConnect * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                        case 2: targetID = totalInputCount + totalOutputCount + (agentConnect * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                    }

                                    //-----------------Get the bias of the target node
                                    coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                                    //String s = arrayToString(coordinates);
                                    //if (weights.ContainsKey(s))
                                    //    neurons[(int)targetID].Bias = weights[s];
                                    //else
                                    {
                                        network.ClearSignals();
                                        network.SetInputSignals(coordinates);
                                        network.RecursiveActivation();//network.MultipleSteps(iterations);
                                        computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                        //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                        neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                       // weights.Add(s, neurons[(int)targetID].Bias);
                                    }
                                    if (ct)
                                    {
                                        neurons[(int)targetID].TimeConstant = timeConstantMin + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * (timeConstantMax - timeConstantMin));
                                        System.Diagnostics.Debug.Assert(neurons[(int)targetID].TimeConstant > 0);
                                    }
                                    //----------------------------

                                    coordinates[0] = source.X;
                                    coordinates[1] = source.Y;
                                    coordinates[2] = target.X;
                                    coordinates[3] = target.Y;
                                    //s = arrayToString(coordinates);
                                    //if (weights.ContainsKey(s))
                                    //    output = weights[s];
                                    //else
                                    {
                                        network.ClearSignals();
                                        network.SetInputSignals(coordinates);
                                        network.RecursiveActivation();//network.MultipleSteps(iterations);
                                        computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                                        //output = network.GetOutputSignal(0);
                                        output = network.GetOutputSignal(computedIndex);
                                      //  weights.Add(s, output);
                                    }

                                    double leo = 0.0;

                                    if (adaptiveNetwork)
                                    {
                                        A = network.GetOutputSignal(2);
                                        B = network.GetOutputSignal(3);
                                        C = network.GetOutputSignal(4);
                                        D = network.GetOutputSignal(5);
                                        learningRate = network.GetOutputSignal(6);
                                    }

                                    if (modulatoryNet)
                                    {
                                        modConnection = network.GetOutputSignal(7);
                                    }
                                    else
                                    {
                                        modConnection = 0.0f;
                                    }

                                    if (useLeo)
                                    {
                                        threshold = 0.0;
                                        leo = network.GetOutputSignal(2);
                                    }

                                    if (!useLeo || leo > 0.0)
                                        if (Math.Abs(output) > threshold)
                                        {
                                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                            //if (adaptiveNetwork)
                                            //{
                                            //    //If adaptive network set weight to small value
                                            //    weight = 0.1f;
                                            //}
                                            connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, true));
                                        }
                                    //else
                                    //{
                                    //    Console.WriteLine("Not connected");
                                    //}
                                    targetCout++;
                                }
                                sourceCount++;
                            }
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
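The repeated switch statements in generateHiveBrainGenomeStack turn a neuron group's type and the agent index into a globally unique id, with all inputs first, then all outputs, then all hidden nodes. A compact sketch of that arithmetic, assuming every agent owns identical per-agent counts and with localIndex standing in for ng.GlobalID + sourceCount (the helper name is hypothetical):

        // Hypothetical helper mirroring the id arithmetic in the switch statements above.
        private static uint StackedNeuronId(int groupType, uint agent, uint localIndex,
                                            uint inputPerAgent, uint outputPerAgent, uint hiddenPerAgent,
                                            uint totalInputCount, uint totalOutputCount)
        {
            switch (groupType)
            {
                case 0:  // input block
                    return (agent * inputPerAgent) + localIndex;
                case 1:  // output block, after all inputs
                    return totalInputCount + (agent * outputPerAgent) + localIndex;
                default: // hidden block, after all inputs and outputs
                    return totalInputCount + totalOutputCount + (agent * hiddenPerAgent) + localIndex;
            }
        }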
Ejemplo n.º 29
0
        public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            double[]           coordinates = new double[4];
            float              output;
            uint               connectionCounter = 0;
            int                iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            ConnectionGeneList connections       = new ConnectionGeneList();

            if (hiddenCount > 0)
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + hiddenDelta / 2.0f;
                    for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[2] += hiddenDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, hidden + inputCount + outputCount, weight));
                        }
                    }
                }
                coordinates[0] = -1 + hiddenDelta / 2.0f;
                coordinates[1] = 0;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint hidden = 0; hidden < hiddenCount; hidden++, coordinates[0] += hiddenDelta)
                {
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, hidden + inputCount + outputCount, outputs + inputCount, weight));
                        }
                    }
                }
            }
            else
            {
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint input = 0; input < inputCount; input++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outputs = 0; outputs < outputCount; outputs++, coordinates[2] += outputDelta)
                    {
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, outputs + inputCount, weight));
                        }
                    }
                }
            }
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
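The sandwich-substrate generators above space nodes evenly along [-1, 1], with each layer's first node half a step from the edge, which is where expressions such as -1 + inputDelta / 2.0f come from. A short sketch of that spacing, under the assumption that inputDelta = 2 / inputCount (and likewise for the hidden and output deltas):

        // Illustrative assumption: n nodes evenly spaced in [-1, 1], starting half a step in from the edge.
        private static float[] LayerPositions(uint nodeCount)
        {
            float delta = 2.0f / nodeCount;
            float[] positions = new float[nodeCount];
            for (int i = 0; i < nodeCount; i++)
            {
                positions[i] = -1.0f + delta / 2.0f + i * delta;
            }
            return positions;
        }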
Ejemplo n.º 30
0
        public override NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            #if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
            #endif
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;
            coordinates[1] = -1;
            coordinates[2] = -1 + hiddenDelta / 2.0f;
            coordinates[3] = 0;

            for (uint source = 0; source < inputCount; source++, coordinates[0] += inputDelta)
            {
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                for (uint target = 0; target < hiddenCount; target++, coordinates[2] += hiddenDelta)
                {

                    //Since there are an equal number of input and hidden nodes, we check these every time
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    output = network.GetOutputSignal(0);
            #if OUTPUT
                    foreach (double d in coordinates)
                        sw.Write(d + " ");
                    sw.Write(output);
                    sw.WriteLine();
            #endif
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, source, target + inputCount + outputCount, weight));
                    }

                    //Since every other hidden node has a corresponding output node, we check every other time
                    if (target % 2 == 0)
                    {
                        network.ClearSignals();
                        coordinates[1] = 0;
                        coordinates[3] = 1;
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);
            #if OUTPUT
                        foreach (double d in coordinates)
                            sw.Write(d + " ");
                        sw.Write(output);
                        sw.WriteLine();
            #endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, (target / 2) + inputCount, weight));
                        }
                        coordinates[1] = -1;
                        coordinates[3] = 0;

                    }
                }
            }
            #if OUTPUT
            sw.Flush();
            #endif
            return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
        }
        public override NeatGenome.NeatGenome generateGenome(INetwork network)
        {
#if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
#endif
            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));
            float[]            coordinates = new float[4];
            float output;
            uint  connectionCounter = 0;
            int   iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;
            coordinates[1] = -1;
            coordinates[2] = -1 + hiddenDelta / 2.0f;
            coordinates[3] = 0;

            for (uint source = 0; source < inputCount; source++, coordinates[0] += inputDelta)
            {
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                for (uint target = 0; target < hiddenCount; target++, coordinates[2] += hiddenDelta)
                {
                    //Since there are an equal number of input and hidden nodes, we check these every time
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    output = network.GetOutputSignal(0);
#if OUTPUT
                    foreach (double d in coordinates)
                    {
                        sw.Write(d + " ");
                    }
                    sw.Write(output);
                    sw.WriteLine();
#endif
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, source, target + inputCount + outputCount, weight));
                    }

                    //Since every other hidden node has a corresponding output node, we check every other time
                    if (target % 2 == 0)
                    {
                        network.ClearSignals();
                        coordinates[1] = 0;
                        coordinates[3] = 1;
                        network.SetInputSignals(coordinates);
                        network.MultipleSteps(iterations);
                        output = network.GetOutputSignal(0);
#if OUTPUT
                        foreach (double d in coordinates)
                        {
                            sw.Write(d + " ");
                        }
                        sw.Write(output);
                        sw.WriteLine();
#endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, (target / 2) + inputCount, weight));
                        }
                        coordinates[1] = -1;
                        coordinates[3] = 0;
                    }
                }
            }
#if OUTPUT
            sw.Flush();
#endif
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
Ejemplo n.º 32
0
        /*
         * The main method that generates a list of ANN connections based on the information in the
         * underlying hypercube.
         * Input : CPPN, InputPositions, OutputPositions, ES-HyperNEAT parameters
         * Output: Connections, HiddenNodes
         */
        public void generateSubstrate(List <PointF> inputNeuronPositions, List <PointF> outputNeuronPositions,
                                      INetwork genome, int initialDepth, float varianceThreshold, float bandThreshold, int ESIterations,
                                      float divsionThreshold, int maxDepth,
                                      uint inputCount, uint outputCount,
                                      ref ConnectionGeneList connections, ref List <PointF> hiddenNeurons, bool useLeo = false)
        {
            List <TempConnection> tempConnections = new List <TempConnection>();
            int  sourceIndex, targetIndex = 0;
            uint counter = 0;

            this.genome            = (ModularNetwork)genome;
            this.initialDepth      = initialDepth;
            this.maxDepth          = maxDepth;
            this.varianceThreshold = varianceThreshold;
            this.bandThrehold      = bandThreshold;
            this.divisionThreshold = divsionThreshold;

            //CONNECTIONS DIRECTLY FROM INPUT NODES
            sourceIndex = 0;
            foreach (PointF input in inputNeuronPositions)
            {
                // Analyze outgoing connectivity pattern from this input
                QuadPoint root = QuadTreeInitialisation(input.X, input.Y, true, (int)initialDepth, (int)maxDepth);
                tempConnections.Clear();
                // Traverse quadtree and add connections to list
                PruneAndExpress(input.X, input.Y, ref tempConnections, root, true, maxDepth);

                foreach (TempConnection p in tempConnections)
                {
                    PointF newp = new PointF(p.x2, p.y2);

                    targetIndex = hiddenNeurons.IndexOf(newp);
                    if (targetIndex == -1)
                    {
                        targetIndex = hiddenNeurons.Count;
                        hiddenNeurons.Add(newp);
                    }
                    connections.Add(new ConnectionGene(counter++, (sourceIndex), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                }
                sourceIndex++;
            }

            tempConnections.Clear();

            List <PointF> unexploredHiddenNodes = new List <PointF>();

            unexploredHiddenNodes.AddRange(hiddenNeurons);

            for (int step = 0; step < ESIterations; step++)
            {
                foreach (PointF hiddenP in unexploredHiddenNodes)
                {
                    tempConnections.Clear();
                    QuadPoint root = QuadTreeInitialisation(hiddenP.X, hiddenP.Y, true, (int)initialDepth, (int)maxDepth);
                    PruneAndExpress(hiddenP.X, hiddenP.Y, ref tempConnections, root, true, maxDepth);

                    sourceIndex = hiddenNeurons.IndexOf(hiddenP);   //TODO there might be a computationally less expensive way

                    foreach (TempConnection p in tempConnections)
                    {
                        PointF newp = new PointF(p.x2, p.y2);

                        targetIndex = hiddenNeurons.IndexOf(newp);
                        if (targetIndex == -1)
                        {
                            targetIndex = hiddenNeurons.Count;
                            hiddenNeurons.Add(newp);
                        }
                        connections.Add(new ConnectionGene(counter++, (sourceIndex + inputCount + outputCount), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                    }
                }
                // Remove the just explored nodes
                List <PointF> temp = new List <PointF>();
                temp.AddRange(hiddenNeurons);
                foreach (PointF f in unexploredHiddenNodes)
                {
                    temp.Remove(f);
                }

                unexploredHiddenNodes = temp;
            }

            tempConnections.Clear();

            //CONNECT TO OUTPUT
            targetIndex = 0;
            foreach (PointF outputPos in outputNeuronPositions)
            {
                // Analyze incoming connectivity pattern to this output
                QuadPoint root = QuadTreeInitialisation(outputPos.X, outputPos.Y, false, (int)initialDepth, (int)maxDepth);
                tempConnections.Clear();
                PruneAndExpress(outputPos.X, outputPos.Y, ref tempConnections, root, false, maxDepth);


                PointF target = new PointF(outputPos.X, outputPos.Y);

                foreach (TempConnection t in tempConnections)
                {
                    PointF source = new PointF(t.x1, t.y1);
                    sourceIndex = hiddenNeurons.IndexOf(source);

                    /* New nodes are not created here because all the hidden nodes that are
                     *  connected to an input/hidden node have already been expressed. */
                    if (sourceIndex != -1)  //only connect if hidden neuron already exists
                    {
                        connections.Add(new ConnectionGene(counter++, (sourceIndex + inputCount + outputCount), (targetIndex + inputCount), t.weight * HyperNEATParameters.weightRange, new float[] { t.x1, t.y1, t.x2, t.y2 }, t.Outputs));
                    }
                }
                targetIndex++;
            }
        }
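A hedged usage sketch of generateSubstrate: the caller supplies the substrate's input and output positions plus the CPPN and the ES-HyperNEAT parameters, and receives the discovered hidden nodes and connections back through the ref arguments. The instance name, the input variables, and the parameter values below are illustrative assumptions, not recommended settings:

            // Illustrative call; esSubstrate, cppn, inputPositions, and outputPositions are assumed to exist.
            ConnectionGeneList connections = new ConnectionGeneList();
            List<PointF> hiddenNeurons = new List<PointF>();
            esSubstrate.generateSubstrate(inputPositions, outputPositions, cppn,
                3,      // initialDepth
                0.03f,  // varianceThreshold
                0.3f,   // bandThreshold
                1,      // ESIterations
                0.03f,  // divsionThreshold
                5,      // maxDepth
                (uint)inputPositions.Count, (uint)outputPositions.Count,
                ref connections, ref hiddenNeurons);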
Ejemplo n.º 33
0
        // for predator prey, ignore
        public NeatGenome generateMultiGenomeModulus(INetwork network, uint numberOfAgents)
        {
#if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
#endif
            var   coordinates = new double[4];
            float output;
            uint  connectionCounter = 0;

            uint inputsPerAgent  = inputCount / numberOfAgents;
            uint hiddenPerAgent  = hiddenCount / numberOfAgents;
            uint outputsPerAgent = outputCount / numberOfAgents;

            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;    //x1
            coordinates[1] = -1;                        //y1
            coordinates[2] = -1 + hiddenDelta / 2.0f;   //x2
            coordinates[3] = 0;                         //y2

            for (uint agent = 0; agent < numberOfAgents; agent++)
            {
                coordinates[0] = -1 + (agent * inputsPerAgent * inputDelta) + inputDelta / 2.0f;
                for (uint source = 0; source < inputsPerAgent; source++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + (agent * hiddenPerAgent * hiddenDelta) + hiddenDelta / 2.0f;
                    for (uint target = 0; target < hiddenPerAgent; target++, coordinates[2] += hiddenDelta)
                    {
                        //Since there are an equal number of input and hidden nodes, we check these every time
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                        output = network.GetOutputSignal(0);
#if OUTPUT
                        foreach (double d in coordinates)
                        {
                            sw.Write(d + " ");
                        }
                        sw.Write(output);
                        sw.WriteLine();
#endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, (agent * inputsPerAgent) + source, (agent * hiddenPerAgent) + target + inputCount + outputCount, weight));
                        }

                        //Since every other hidden node has a corresponding output node, we check every other time
                        if (target % 2 == 0)
                        {
                            network.ClearSignals();
                            coordinates[1] = 0;
                            coordinates[3] = 1;
                            network.SetInputSignals(coordinates);
                            ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                            output = network.GetOutputSignal(0);
#if OUTPUT
                            foreach (double d in coordinates)
                            {
                                sw.Write(d + " ");
                            }
                            sw.Write(output);
                            sw.WriteLine();
#endif
                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, (agent * hiddenPerAgent) + source + inputCount + outputCount, ((outputsPerAgent * agent) + ((target) / 2)) + inputCount, weight));
                            }
                            coordinates[1] = -1;
                            coordinates[3] = 0;
                        }
                    }
                }
            }
#if OUTPUT
            sw.Flush();
#endif
            //Console.WriteLine(count);
            //Console.ReadLine();
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
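In the modulus variant above, each agent owns contiguous blocks of inputs, hidden nodes, and outputs, and only every other hidden node feeds a corresponding output (the loop structure assumes hiddenPerAgent is twice outputsPerAgent). A minimal sketch of the target-id arithmetic behind that pairing; the helper name is hypothetical:

        // Hypothetical helper: id of the output node paired with an agent's hidden node.
        // Hidden indices 0, 2, 4, ... map to outputs 0, 1, 2, ... within the agent's block.
        private static uint PairedOutputId(uint agent, uint hiddenIndex, uint outputsPerAgent, uint inputCount)
        {
            return (agent * outputsPerAgent) + (hiddenIndex / 2) + inputCount;
        }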
Ejemplo n.º 34
0
        public override NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            float[]            coordinates = new float[4];
            float              outputR_RGB, outputG_RGB, outputB_RGB, outputHeading_HiddenH, outputRGB_DIR, outputRGB_P, outputHiddenH_DIR, outputHiddenH_P;
            uint               connectionCounter = 0;
            int                iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            ConnectionGeneList connections       = new ConnectionGeneList();

            // Query the CPPN for the connections between the RGB input and hidden layers
            xDelta = 2.0f / (rgbOneDimension);
            yDelta = 2.0f / (rgbOneDimension);

            coordinates[0] = -1 + xDelta / 2.0f;
            coordinates[1] = -1 + yDelta / 2.0f;
            coordinates[2] = -1 + xDelta / 2.0f;
            coordinates[3] = -1 + yDelta / 2.0f;

            for (uint x = 0; x < rgbOneDimension; x++, coordinates[0] += xDelta)
            {
                // Reset the y1 coordinate and then loop through all possible values of y1
                coordinates[1] = -1 + yDelta / 2.0f;
                for (uint y = 0; y < rgbOneDimension; y++, coordinates[1] += yDelta)
                {
                    // Reset the x2 coordinate and then loop through all possible values of x2
                    coordinates[2] = -1 + xDelta / 2.0f;
                    for (uint x2 = 0; x2 < rgbOneDimension; x2++, coordinates[2] += xDelta)
                    {
                        // Reset the y2 coordinate then loop through all possible values of y2
                        coordinates[3] = -1 + yDelta / 2.0f;
                        for (uint y2 = 0; y2 < rgbOneDimension; y2++, coordinates[3] += yDelta)
                        {
                            // Set the CPPN inputs, activate the CPPN, and read the output signals
                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.MultipleSteps(iterations);
                            outputR_RGB = network.GetOutputSignal(0);
                            outputG_RGB = network.GetOutputSignal(1);
                            outputB_RGB = network.GetOutputSignal(2);

                            // Calculate the weight of the R->RGB connection based on the CPPN output
                            if (Math.Abs(outputR_RGB) > threshold)
                            {
                                float weight = (float)(((Math.Abs(outputR_RGB) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputR_RGB));
                                //float weight = 0.0f;
                                connections.Add(new ConnectionGene(connectionCounter++, x + rgbOneDimension * y, (x2 + rgbOneDimension * y2) + inputCount + outputCount, weight));
                                //Console.WriteLine("R Generated connection from " + (x + rgbOneDimension * y) + " to " + ((x2 + rgbOneDimension * y2) + inputCount + outputCount));
                            }

                            // Calculate the weight of the G->RGB connection based on the CPPN output
                            if (Math.Abs(outputG_RGB) > threshold)
                            {
                                float weight = (float)(((Math.Abs(outputG_RGB) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputG_RGB));
                                //float weight = 0.0f;
                                connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + colorArraySize, (x2 + rgbOneDimension * y2) + inputCount + outputCount, weight));
                                //Console.WriteLine("G Generated connection from " + (x + rgbOneDimension * y) + " to " + ((x2 + rgbOneDimension * y2) + inputCount + outputCount));
                            }

                            // Calculate the weight of the B->RGB connection based on the CPPN output
                            if (Math.Abs(outputB_RGB) > threshold)
                            {
                                float weight = (float)(((Math.Abs(outputB_RGB) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputB_RGB));
                                //float weight = 0.0f;
                                connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + 2 * colorArraySize, (x2 + rgbOneDimension * y2) + inputCount + outputCount, weight));
                                //Console.WriteLine("B Generated connection from " + (x + rgbOneDimension * y) + " to " + ((x2 + rgbOneDimension * y2) + inputCount + outputCount));
                            }
                        }
                    }
                }
            }

            // Query the CPPN for the connections between the H input and hidden layers

            /*
             * uint headingX = 3;
             * uint headingY = 3;
             * xDelta = 1.0f;
             * yDelta = 1.0f;
             *
             * coordinates[0] = -1.0f;
             * coordinates[1] = -1.0f;
             * coordinates[2] = -1.0f;
             * coordinates[3] = -1.0f;
             *
             * for (uint x = 0; x < headingX; x++, coordinates[0] += xDelta)
             * {
             *  // Reset the y1 coordinate and then loop through all possible values of y1
             *  coordinates[1] = -1.0f;
             *  for (uint y = 0; y < headingY; y++, coordinates[1] += yDelta)
             *  {
             *      // Reset the x2 coordinate and then loop through all possible values of x2
             *      coordinates[2] = -1.0f;
             *      for (uint x2 = 0; x2 < headingX; x2++, coordinates[2] += xDelta)
             *      {
             *          // Reset the y2 coordinate then loop through all possible values of y2
             *          coordinates[3] = -1.0f;
             *          for (uint y2 = 0; y2 < headingY; y2++, coordinates[3] += yDelta)
             *          {
             *              // Don't query for (0,0) - this substrate does not have a node in the center of the heading plane
             *              if (!((coordinates[0] == 0.0f && coordinates[1] == 0.0f) || (coordinates[2] == 0.0f && coordinates[3] == 0.0f)))
             *              {
             *                  // Set the CPPN inputs, activate the CPPN, and read the output signals
             *                  network.ClearSignals();
             *                  network.SetInputSignals(coordinates);
             *                  network.MultipleSteps(iterations);
             *                  outputHeading_HiddenH = network.GetOutputSignal(3);
             *
             *                  // Calculate the weight of the HD->HDP connection based on the CPPN output
             *                  if (Math.Abs(outputHeading_HiddenH) > threshold)
             *                  {
             *                      float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
             *                      connections.Add(new ConnectionGene(connectionCounter++, 3 * colorArraySize + (x + headingX * y), (x2 + headingX * y2) + inputCount + outputCount + colorArraySize, weight));
             *                      //Console.WriteLine("HD Generated connection from " + (3 * colorArraySize + (x + headingX * y)) + " to " + ((x2 + headingX * y2) + inputCount + outputCount + colorArraySize));
             *                  }
             *              }
             *          }
             *      }
             *  }
             * }*/


            xDelta = 1.0f;
            uint headingX = 3;

            coordinates[0] = -1.0f;
            coordinates[1] = -1.0f;

            // Determine all connections from H row 1
            for (uint src = 300; src < 303; src++, coordinates[0] += xDelta)
            {
                // Reset the x2 coordinate and then loop through all possible values of x2
                coordinates[2] = -1.0f;
                for (uint tgt = 412; tgt < 415; tgt++, coordinates[2] += xDelta)
                {
                    // Query the CPPN for the connections between H row 1 and HDP row 1
                    coordinates[3] = -1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }

                    // Query the CPPN for the connections between H row 1 and HDP row 3
                    coordinates[3] = 1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt + 5, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + (tgt + 5) + " with weight " + weight);
                    }
                }
            }

            coordinates[0] = -1.0f;
            coordinates[1] = 1.0f;

            // Determine all connections from H row 3
            for (uint src = 305; src < 308; src++, coordinates[0] += xDelta)
            {
                // Reset the x2 coordinate and then loop through all possible values of x2
                coordinates[2] = -1.0f;
                for (uint x2 = 0; x2 < headingX; x2++, coordinates[2] += xDelta)
                {
                    // Query the CPPN for the connections between H row 3 and HDP row 1
                    coordinates[3] = -1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, 412 + x2, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + (412 + x2) + " with weight " + weight);
                    }

                    // Query the CPPN for the connections between H row 3 and HDP row 3
                    coordinates[3] = 1.0f;

                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, 417 + x2, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + (417 + x2) + " with weight " + weight);
                    }
                }
            }

            // Determine the connections for the H and HDP middle rows

            // Query the CPPN for the connections between H row 1 and HDP row 2
            coordinates[0] = -1.0f;
            coordinates[1] = -1.0f;
            coordinates[3] = 0.0f;
            for (uint src = 300; src < 303; src++, coordinates[0] += xDelta)
            {
                coordinates[2] = -1.0f;
                for (uint tgt = 415; tgt < 417; tgt++, coordinates[2] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }

            // Query the CPPN for the connections between H row 2 and HDP row 2
            coordinates[0] = -1.0f;
            coordinates[1] = 0.0f;
            coordinates[3] = 0.0f;

            for (uint src = 303; src < 305; src++, coordinates[0] += 2.0f)
            {
                coordinates[2] = -1.0f;
                for (uint tgt = 415; tgt < 417; tgt++, coordinates[2] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }


            // Query the CPPN for the connections between H row 3 and HDP row 2
            coordinates[0] = -1.0f;
            coordinates[1] = 1.0f;
            coordinates[3] = 0.0f;
            for (uint src = 305; src < 308; src++, coordinates[0] += xDelta)
            {
                coordinates[2] = -1.0f;
                for (uint tgt = 415; tgt < 417; tgt++, coordinates[2] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }

            // Query the CPPN for the connections between H row 2 and HDP row 1
            coordinates[1] = 0.0f;
            coordinates[2] = -1.0f;
            for (uint src = 303; src < 305; src++, coordinates[2] += 2.0f)
            {
                coordinates[0] = -1.0f;
                coordinates[3] = -1.0f;
                for (uint tgt = 412; tgt < 415; tgt++, coordinates[0] += xDelta)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }

                // Query the CPPN for the connections between H row 2 and HDP row 3
                coordinates[0] = -1.0f;
                coordinates[3] = 1.0f;
                for (uint tgt = 417; tgt < 420; tgt++, coordinates[0] += xDelta)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signals
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHeading_HiddenH = network.GetOutputSignal(3);

                    // Calculate the weight of the HD->HDP connection based on the CPPN output
                    if (Math.Abs(outputHeading_HiddenH) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHeading_HiddenH) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHeading_HiddenH));
                        connections.Add(new ConnectionGene(connectionCounter++, src, tgt, weight));
                        //Console.WriteLine("Generated connection from " + src + " to " + tgt + " with weight " + weight);
                    }
                }
            }

            // Query the CPPN for the connections between the RGB hidden layer and movement output nodes
            xDelta = 2.0f / (rgbOneDimension);
            yDelta = 2.0f / (rgbOneDimension);

            coordinates[0] = -1 + xDelta / 2.0f;
            coordinates[1] = -1 + yDelta / 2.0f;

            for (uint x = 0; x < rgbOneDimension; x++, coordinates[0] += xDelta)
            {
                // Reset the y1 coordinate and then loop through all possible values of y1
                coordinates[1] = -1 + yDelta / 2.0f;
                for (uint y = 0; y < rgbOneDimension; y++, coordinates[1] += yDelta)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = -1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputRGB_DIR = network.GetOutputSignal(4);

                    // Calculate the weight of the RGB->L connection based on the CPPN output
                    if (Math.Abs(outputRGB_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 308, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 310");
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 0.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputRGB_DIR = network.GetOutputSignal(4);
                    outputRGB_P   = network.GetOutputSignal(5);

                    // Calculate the weight of the RGB->S connection based on the CPPN output
                    if (Math.Abs(outputRGB_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 309, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 311");
                    }

                    // Calculate the weight of the RGB->P connection based on the CPPN output
                    if (Math.Abs(outputRGB_P) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_P) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_P));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 311, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 313");
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputRGB_DIR = network.GetOutputSignal(4);

                    // Calculate the weight of the RGB->R connection based on the CPPN output
                    if (Math.Abs(outputRGB_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputRGB_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputRGB_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, (x + rgbOneDimension * y) + inputCount + outputCount, 310, weight));
                        //Console.WriteLine("RGB Generated connection from " + ((x + rgbOneDimension * y) + inputCount + outputCount) + " to 312");
                    }
                }
            }

            // Query the CPPN for the connections between the Heading hidden and output layers
            xDelta = 1.0f;
            yDelta = 1.0f;

            coordinates[0] = -1.0f;

            // Determine the connections for heading hidden rows 1 and 3
            for (uint src = 412; src < 415; src++, coordinates[0] += xDelta)
            {
                uint srcOffset = 0;
                for (coordinates[1] = -1.0f; coordinates[1] < 3.0f; coordinates[1] += 2.0f)
                {
                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = -1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHiddenH_DIR = network.GetOutputSignal(6);

                    // Calculate the weight of the HDP->L connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 308, weight));
                        //Console.WriteLine("Generated connection from " + (src+srcOffset) + " to 308 with weight " + weight);
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 0.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHiddenH_DIR = network.GetOutputSignal(6);
                    outputHiddenH_P   = network.GetOutputSignal(7);

                    // Calculate the weight of the HDP->S connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 309, weight));
                        //Console.WriteLine("Generated connection from " + (src + srcOffset) + " to 309 with weight " + weight);
                    }

                    // Calculate the weight of the HDP->P connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_P) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_P) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_P));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 311, weight));
                        //Console.WriteLine("Generated connection from " + (src + srcOffset) + " to 311 with weight " + weight);
                    }

                    // Set the CPPN inputs, activate the CPPN, and read the output signal
                    network.ClearSignals();
                    coordinates[2] = 1.0f;
                    coordinates[3] = 0.0f;
                    network.SetInputSignals(coordinates);
                    network.MultipleSteps(iterations);
                    outputHiddenH_DIR = network.GetOutputSignal(6);

                    // Calculate the weight of the HDP->R connection based on the CPPN output
                    if (Math.Abs(outputHiddenH_DIR) > threshold)
                    {
                        float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                        connections.Add(new ConnectionGene(connectionCounter++, src + srcOffset, 310, weight));
                        //Console.WriteLine("Generated connection from " + (src + srcOffset) + " to 310 with weight " + weight);
                    }

                    srcOffset += 5;
                }
            }

            coordinates[0] = -1.0f;
            coordinates[1] = 0.0f;

            // Determine the connections for heading hidden row 2
            for (uint src = 415; src < 417; src++, coordinates[0] += 2.0f)
            {
                // Set the CPPN inputs, activate the CPPN, and read the output signal
                network.ClearSignals();
                coordinates[2] = -1.0f;
                coordinates[3] = 0.0f;
                network.SetInputSignals(coordinates);
                network.MultipleSteps(iterations);
                outputHiddenH_DIR = network.GetOutputSignal(6);

                // Calculate the weight of the HDP->L connection based on the CPPN output
                if (Math.Abs(outputHiddenH_DIR) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 308, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 308 with weight " + weight);
                }

                // Set the CPPN inputs, activate the CPPN, and read the output signal
                network.ClearSignals();
                coordinates[2] = 0.0f;
                coordinates[3] = 0.0f;
                network.SetInputSignals(coordinates);
                network.MultipleSteps(iterations);
                outputHiddenH_DIR = network.GetOutputSignal(6);
                outputHiddenH_P   = network.GetOutputSignal(7);

                // Calculate the weight of the HDP->S connection based on the CPPN output
                if (Math.Abs(outputHiddenH_DIR) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 309, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 309 with weight " + weight);
                }

                // Calculate the weight of the HDP->P connection based on the CPPN output
                if (Math.Abs(outputHiddenH_P) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_P) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_P));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 311, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 311 with weight " + weight);
                }

                // Set the CPPN inputs, activate the CPPN, and read the output signal
                network.ClearSignals();
                coordinates[2] = 1.0f;
                coordinates[3] = 0.0f;
                network.SetInputSignals(coordinates);
                network.MultipleSteps(iterations);
                outputHiddenH_DIR = network.GetOutputSignal(6);

                // Calculate the weight of the HDP->R connection based on the CPPN output
                if (Math.Abs(outputHiddenH_DIR) > threshold)
                {
                    float weight = (float)(((Math.Abs(outputHiddenH_DIR) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(outputHiddenH_DIR));
                    connections.Add(new ConnectionGene(connectionCounter++, src, 310, weight));
                    //Console.WriteLine("Generated connection from " + src + " to 310 with weight " + weight);
                }
            }


            // Return a genome combining the already-specified neurons with the CPPN-generated connections
            return(new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount));
        }
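        // Illustrative sketch (not part of the original listing): the weight calculation repeated after
        // every CPPN query above, factored into one helper. The "threshold" and "weightRange" semantics
        // are assumed to match the fields used by the method above; a null return means the CPPN output
        // did not clear the threshold, so no connection is expressed.
        private static float? CppnOutputToWeight(float cppnOutput, double threshold, double weightRange)
        {
            if (Math.Abs(cppnOutput) <= threshold)
                return null; // below the expression threshold: no connection is created

            // Rescale |output| from (threshold, 1] onto (0, weightRange] and restore the original sign
            return (float)(((Math.Abs(cppnOutput) - threshold) / (1 - threshold)) * weightRange * Math.Sign(cppnOutput));
        }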
        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateMultiGenomeStackES(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCount;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }


            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;


            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);

            uint hiddenCount = 0;

            foreach (float stackCoordinate in stackCoordinates)
            {
                List<PointF> hiddenNeuronPositions = new List<PointF>();
                ConnectionGeneList con = new ConnectionGeneList();
                SubstrateEvolution se = new SubstrateEvolution();
                se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                    SubstrateEvolution.SAMPLE_WIDTH,
                    SubstrateEvolution.SAMPLE_TRESHOLD,
                    SubstrateEvolution.NEIGHBOR_LEVEL,
                    SubstrateEvolution.INCREASE_RESSOLUTION_THRESHOLD,
                    SubstrateEvolution.MIN_DISTANCE,
                    SubstrateEvolution.CONNECTION_TRESHOLD, //0.4. ConnectionThreshold
                    InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref con, ref hiddenNeuronPositions, stackCoordinate);

                // set up the hidden nodes
                for (uint a = 0; a < hiddenNeuronPositions.Count; a++)
                {
                    neurons.Add(new NeuronGene(hiddenCount + a + totalInputCount + totalOutputCount, NeuronType.Hidden, activationFunction));
                }



                foreach (ConnectionGene c in con)
                {
                    if (c.SourceNeuronId < InputCount)
                    {
                        c.SourceNeuronId += agent * InputCount;
                    }
                    else if (c.SourceNeuronId < InputCount + OutputCount)
                    {
                        c.SourceNeuronId = (c.SourceNeuronId - InputCount) + totalInputCount + agent * OutputCount;
                    }
                    else
                    {
                        c.SourceNeuronId = (uint)((c.SourceNeuronId - InputCount - OutputCount) + totalInputCount + totalOutputCount + hiddenCount);
                    }

                    if (c.TargetNeuronId < InputCount)
                    {
                        c.TargetNeuronId += agent * InputCount;
                    }
                    else if (c.TargetNeuronId < InputCount + OutputCount)
                    {
                        c.TargetNeuronId = (c.TargetNeuronId - InputCount) + totalInputCount + agent * OutputCount;
                    }
                    else
                    {
                        c.TargetNeuronId = (uint)((c.TargetNeuronId - InputCount - OutputCount) + totalInputCount + totalOutputCount + hiddenCount);
                    }

                    connections.Add(new ConnectionGene(connectionCounter++, c.SourceNeuronId, c.TargetNeuronId, c.Weight, ref c.coordinates));

                }
                hiddenCount += (uint)hiddenNeuronPositions.Count;
                agent++;

            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
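        // Illustrative sketch (not part of the original listing): the per-agent neuron ID remapping applied
        // to the SubstrateEvolution connections in generateMultiGenomeStackES above, expressed as one helper.
        // "hiddenOffset" stands for the running total of hidden neurons already emitted for earlier agents
        // (the local "hiddenCount" accumulator in the method above).
        private static uint RemapNeuronIdForAgent(uint id, uint agent, uint inputCount, uint outputCount,
                                                  uint totalInputCount, uint totalOutputCount, uint hiddenOffset)
        {
            if (id < inputCount)
                return id + agent * inputCount;                                      // input plane of this agent
            if (id < inputCount + outputCount)
                return (id - inputCount) + totalInputCount + agent * outputCount;    // output plane of this agent
            return (id - inputCount - outputCount) + totalInputCount + totalOutputCount + hiddenOffset; // hidden plane
        }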
Example #36
        public ESBodyInformation genomeIntoBodyObject(IGenome genome, out bool isEmpty)
        {
            INetwork net = GenomeDecoder.DecodeToModularNetwork((NeatGenome)genome);
            isEmpty = false;

            //we want the genome, so we can acknowledge the genomeID!

            //now convert a network to a set of hidden neurons and connections

            //we'll make body specific function calls later
            var allBodyOutputs = new List<List<float>>();
            var allBodyInputs = new List<PointPair>();
            var indexToConnectionMap = new Dictionary<int, int>();

            List<PointF> inputs, outputs, hiddenNeurons;
            inputs = new List<PointF>();
            outputs = new List<PointF>();
            hiddenNeurons = new List<PointF>();

            //inputs.Add(new PointF(0,0));

            //int initialDepth, ESIterations;
            //uint inputCount, outputCount;
            //float varianceThreshold, bandThreshold;

            ConnectionGeneList connections = new ConnectionGeneList();

            //loop through a grid, defined by some resolution, and test every pair of points for a possible connection using LEO

            int resolution = 9;
            //int resolutionHalf = resolution / 2;

            List<PointF> queryPoints = gridQueryPoints(resolution);
            float xDistanceThree = dXDistance(resolution, 3.0f);
            float yDistanceThree = dYDistance(resolution, 3.0f);

            bool useLeo = true;

            int counter = 0;
            Dictionary<long, PointF> conSourcePoints = new Dictionary<long, PointF>();
            Dictionary<long, PointF> conTargetPoints = new Dictionary<long, PointF>();

            //Dictionary<string, List<PointF>> pointsChecked = new Dictionary<string, List<PointF>>();
            //List<PointF> pList;
            int src, tgt;
            //for each point in the grid
            for(int p1=0; p1 < queryPoints.Count; p1++)
            {
                PointF xyPoint = queryPoints[p1];

                //query against all other points (possibly limiting certain connection lengths)
                for(int p2 = p1; p2 < queryPoints.Count; p2++)
                {
                    PointF otherPoint = queryPoints[p2];

                    if (p1 != p2 && (Math.Abs(xyPoint.X - otherPoint.X) < xDistanceThree && Math.Abs(xyPoint.Y - otherPoint.Y) < yDistanceThree))
                    {
                        //if(!pointsChecked.TryGetValue(xyPoint.ToString(), out pList))
                        //{
                        //    pList = new List<PointF>();
                        //    pointsChecked.Add(xyPoint.ToString(), pList);
                        //}
                        //pList.Add(otherPoint);

                        //if (!pointsChecked.TryGetValue(otherPoint.ToString(), out pList))
                        //{
                        //    pList = new List<PointF>();
                        //    pointsChecked.Add(otherPoint.ToString(), pList);
                        //}
                        //pList.Add(xyPoint);

                        //Console.WriteLine("Checking: ({0}, {1}) => ({2}, {3}) ", xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y);

                        float[] outs = queryCPPNOutputs((ModularNetwork)net, xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y, maxXDistanceCenter(xyPoint, otherPoint),  minYDistanceGround(xyPoint, otherPoint));
                        float weight = outs[0];

                        allBodyInputs.Add(new PointPair(xyPoint, otherPoint));
                        allBodyOutputs.Add(new List<float>(outs));

                        if (useLeo )
                        {

                            if (outs[1] > 0)
                            {
                                //Console.WriteLine("XY: " + xyPoint + " Other: " + otherPoint + " LEO : " + outs[1]) ;

                                //Console.WriteLine(" XDist: " + sqrt(xDistanceSq(xyPoint, otherPoint))
                                //    + " yDist : " + sqrt(yDistanceSq(xyPoint, otherPoint))
                                //    + " MaxDist: " + maxXDistanceCenter(xyPoint, otherPoint))
                                   //+ " MinY: " + minYDistanceGround(xyPoint, otherPoint));
                                //Console.WriteLine();

                                //add to hidden neurons
                                if (!hiddenNeurons.Contains(xyPoint))
                                    hiddenNeurons.Add(xyPoint);

                                src = hiddenNeurons.IndexOf(xyPoint);

                                if (!hiddenNeurons.Contains(otherPoint))
                                    hiddenNeurons.Add(otherPoint);

                                tgt = hiddenNeurons.IndexOf(otherPoint);

                                conSourcePoints.Add(counter, xyPoint);
                                conTargetPoints.Add(counter, otherPoint);

                                indexToConnectionMap.Add(allBodyOutputs.Count-1, counter);
                                connections.Add(new ConnectionGene(counter++, (src), (tgt), weight * HyperNEATParameters.weightRange, new float[] { xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y }, outs));

                            }
                        }
                        else
                        {
                            //add to hidden neurons
                            if (!hiddenNeurons.Contains(xyPoint))
                                hiddenNeurons.Add(xyPoint);

                            src = hiddenNeurons.IndexOf(xyPoint);

                            if (!hiddenNeurons.Contains(otherPoint))
                                hiddenNeurons.Add(otherPoint);

                            tgt = hiddenNeurons.IndexOf(otherPoint);

                            conSourcePoints.Add(counter, xyPoint);
                            conTargetPoints.Add(counter, otherPoint);

                            indexToConnectionMap.Add(allBodyOutputs.Count - 1, counter);
                            connections.Add(new ConnectionGene(counter++, (src), (tgt), weight * HyperNEATParameters.weightRange, new float[] { xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y }, outs));

                        }

                        //PointF newp = new PointF(p.x2, p.y2);

                        //targetIndex = hiddenNeurons.IndexOf(newp);
                        //if (targetIndex == -1)
                        //{
                        //    targetIndex = hiddenNeurons.Count;
                        //    hiddenNeurons.Add(newp);
                        //}
                        //connections.Add(new ConnectionGene(counter++, (sourceIndex), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));

                    }
                }

            }

            //esSubstrate.generateSubstrate(inputs, outputs, net,
            //    HyperNEATParameters.initialDepth,
            //    (float)HyperNEATParameters.varianceThreshold,
            //     (float)HyperNEATParameters.bandingThreshold,
            //    HyperNEATParameters.ESIterations,
            //     (float)HyperNEATParameters.divisionThreshold,
            //    HyperNEATParameters.maximumDepth,
            //    (uint)inputs.Count, (uint)outputs.Count,
            //    ref connections, ref hiddenNeurons, true);

            //generateSubstrate(List<System.Drawing.PointF> inputNeuronPositions, List<PointF> outputNeuronPositions,
            //INetwork genome, int initialDepth, float varianceThreshold, float bandThreshold, int ESIterations,
            //                                    float divsionThreshold, int maxDepth,
            //                                    uint inputCount, uint outputCount,
            //                                    ref  ConnectionGeneList connections, ref List<PointF> hiddenNeurons)

            //blow out the object, we don't care about testing it

            //foreach (var pPair in pointsChecked)
            //{
            //    Console.WriteLine("Checking: " + pPair.Key + " processed: ");

            //    foreach (var xyPoint in pPair.Value)
            //    {
            //        Console.WriteLine("({0}, {1}) ", xyPoint.X, xyPoint.Y);
            //    }
            //}

            var beforeConn = connections.Count;
            var beforeNeuron = hiddenNeurons.Count;
            //var hiddenCopy = new List<PointF>(hiddenNeurons);

            ensureSingleConnectedStructure(connections, hiddenNeurons, conSourcePoints, conTargetPoints);

            if (hiddenNeurons.Count > 20 || connections.Count > 100)
            {
                hiddenNeurons = new List<PointF>();
                connections = new ConnectionGeneList();
            }

            if (hiddenNeurons.Count == 0 || connections.Count == 0)
                isEmpty = true;

            NeatGenome ng = (NeatGenome)genome;

            bool behaviorExists = (ng.Behavior != null);

            ESBodyInformation esbody = new ESBodyInformation() {
                AllBodyOutputs = allBodyOutputs,
                AllBodyInputs = allBodyInputs,
                indexToConnection = indexToConnectionMap,
                //PreHiddenLocations = hiddenCopy,
                BeforeNeuron = beforeNeuron,
                BeforeConnection = beforeConn,
                GenomeID = genome.GenomeId,
                Connections = connections,
                HiddenLocations = hiddenNeurons,
                InputLocations = inputs,
                Objectives = ng.objectives,
                Fitness =  ng.Fitness,
                Locality = ng.locality,
                useLEO = useLeo
            };
            Console.WriteLine(" Nodes: " + hiddenNeurons.Count + " Connections: " + connections.Count);

            return esbody;
        }
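        // Illustrative sketch (assumption; the real gridQueryPoints implementation is not shown in this
        // listing): one plausible way to enumerate the resolution x resolution grid of query points in
        // [-1, 1] x [-1, 1] that genomeIntoBodyObject above iterates over, using the same "delta / 2"
        // cell-centering convention seen elsewhere in this file.
        private static List<PointF> GridQueryPointsSketch(int resolution)
        {
            var points = new List<PointF>(resolution * resolution);
            float delta = 2.0f / resolution;
            for (int ix = 0; ix < resolution; ix++)
                for (int iy = 0; iy < resolution; iy++)
                    points.Add(new PointF(-1.0f + delta / 2.0f + ix * delta,
                                          -1.0f + delta / 2.0f + iy * delta));
            return points;
        }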
        // MPS support on the Hive methods only
        #region Generate heterogenous genomes with z-stack

        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCount;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //-----------------Get the bias of the source node
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                            }
                            coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);

                            neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                            //----------------------------

                            targetCount = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCount; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCount; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCount; break;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCount++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
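        // Illustrative usage sketch (assumption, not from the original listing): building a z-stack genome
        // for three agents placed at evenly spaced stack coordinates. The argument order follows the
        // signature of generateMultiGenomeStack above; the real call sites are not part of this listing.
        private NeatGenome.NeatGenome GenerateThreeAgentStackSketch(INetwork cppn)
        {
            // one stack (z) coordinate per agent, spread across [-1, 1]
            List<float> stackCoordinates = new List<float> { -1.0f, 0.0f, 1.0f };

            // normalizeWeights = true, adaptiveNetwork = false, modulatoryNet = false
            return generateMultiGenomeStack(cppn, stackCoordinates, true, false, false);
        }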
Example #38
        public virtual NeatGenome.NeatGenome generateGenome(INetwork network)
        {
            int maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            double epsilon = 0.0;

            uint firstBias = 0;
            uint lastBias = biasCount;
            uint firstInput = biasCount;
            uint lastInput = biasCount + inputCount;
            uint firstOutput = biasCount + inputCount;
            uint lastOutput = biasCount + inputCount + outputCount;
            uint firstHidden = biasCount + inputCount + outputCount;
            uint lastHidden = biasCount + inputCount + outputCount + hiddenCount;

            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            ConnectionGeneList connections = new ConnectionGeneList();

            // give bias inputs to all hidden and output nodes.
            // the source of the link is located at (0,0), the target is each node, and the weight of the link is the second output of the CPPN.
            coordinates[0] = 0;
            coordinates[1] = 0;
            for (uint bias = firstBias; bias < lastBias; bias++) {
                // link the bias to all hidden nodes.
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint hidden = firstHidden; hidden < lastHidden; hidden++) {
                    coordinates[2] += hiddenDelta;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(1);

                    if (Math.Abs(output) > threshold) {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, bias, hidden, weight));
                    }
                }

                // link the bias to all output nodes.
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint outp = firstOutput; outp < lastOutput; outp++) {
                    coordinates[2] += outputDelta;
                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(1);

                    if (Math.Abs(output) > threshold) {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, bias, outp, weight));
                    }
                }
            }

            if (hiddenCount > 0) {
                // link all input nodes to all hidden nodes.
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + hiddenDelta / 2.0f;
                coordinates[3] = 0;
                for (uint input = firstInput; input < lastInput; input++) {
                    coordinates[0] += inputDelta;
                    coordinates[2] = -1 + hiddenDelta / 2.0f;
                    for (uint hidden = firstHidden; hidden < lastHidden; hidden++) {
                        coordinates[2] += hiddenDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold) {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, hidden, weight));
                        }
                    }
                }

                // link all hidden nodes to all output nodes.
                coordinates[0] = -1 + hiddenDelta / 2.0f;
                coordinates[1] = 0;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint hidden = firstHidden; hidden < lastHidden; hidden++) {
                    coordinates[0] += hiddenDelta;
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outp = firstOutput; outp < lastOutput; outp++) {
                        coordinates[2] += outputDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold) {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, hidden, outp, weight));
                        }
                    }
                }

            } else { // there are no hidden nodes, so connect each input to each output only.
                // link all input nodes to all output nodes.
                coordinates[0] = -1 + inputDelta / 2.0f;
                coordinates[1] = -1;
                coordinates[2] = -1 + outputDelta / 2.0f;
                coordinates[3] = 1;
                for (uint input = firstInput; input < lastInput; input++) {
                    coordinates[0] += inputDelta;
                    coordinates[2] = -1 + outputDelta / 2.0f;
                    for (uint outp = firstOutput; outp < lastOutput; outp++) {
                        coordinates[2] += outputDelta;
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RelaxNetwork(maxIterations, epsilon);
                        output = network.GetOutputSignal(0);

                        if (Math.Abs(output) > threshold) {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, input, outp, weight));
                        }
                    }
                }
            }

            return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
        }
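
Every generator in this listing converts a CPPN output into a connection weight with the same inline expression. The helper below is a sketch, not part of the original source (the name CppnOutputToWeight is made up); it simply factors out that repeated mapping: outputs inside the dead zone [-threshold, threshold] express no connection, and the remaining magnitude is rescaled linearly onto (0, weightRange] while keeping the sign.

        // Sketch only: hypothetical helper factoring out the repeated weight-mapping expression.
        private static float CppnOutputToWeight(float output, double threshold, double weightRange)
        {
            if (Math.Abs(output) <= threshold)
            {
                return 0.0f; // below the expression threshold: no connection gene is created
            }
            // rescale the remaining magnitude onto (0, weightRange], preserving the sign of the CPPN output
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }

A caller would add a ConnectionGene only when the returned value is non-zero, mirroring the Math.Abs(output) > threshold guard used above.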
Example No. 39
        public NeatGenome.NeatGenome generateMultiGenomeModulus(INetwork network, uint numberOfAgents)
        {
            #if OUTPUT
            System.IO.StreamWriter sw = new System.IO.StreamWriter("testfile.txt");
            #endif
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;

            uint inputsPerAgent = inputCount / numberOfAgents;
            uint hiddenPerAgent = hiddenCount / numberOfAgents;
            uint outputsPerAgent = outputCount / numberOfAgents;

            ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount*hiddenCount)+(hiddenCount*outputCount)));

            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            coordinates[0] = -1 + inputDelta / 2.0f;    //x1
            coordinates[1] = -1;                        //y1
            coordinates[2] = -1 + hiddenDelta / 2.0f;   //x2
            coordinates[3] = 0;                         //y2

            for (uint agent = 0; agent < numberOfAgents; agent++)
            {
                coordinates[0] = -1 + (agent * inputsPerAgent * inputDelta) + inputDelta / 2.0f;
                for (uint source = 0; source < inputsPerAgent; source++, coordinates[0] += inputDelta)
                {
                    coordinates[2] = -1 + (agent * hiddenPerAgent * hiddenDelta) + hiddenDelta / 2.0f;
                    for (uint target = 0; target < hiddenPerAgent; target++, coordinates[2] += hiddenDelta)
                    {

                        //Since there are equal numbers of input and hidden nodes, we check these every time
                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                        output = network.GetOutputSignal(0);
            #if OUTPUT
                            foreach (float d in coordinates)
                                sw.Write(d + " ");
                            sw.Write(output);
                            sw.WriteLine();
            #endif
                        if (Math.Abs(output) > threshold)
                        {
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++, (agent*inputsPerAgent) + source, (agent*hiddenPerAgent) + target + inputCount + outputCount, weight));
                        }

                        //Since every other hidden node has a corresponding output node, we check every other time
                        if (target % 2 == 0)
                        {
                            network.ClearSignals();
                            coordinates[1] = 0;
                            coordinates[3] = 1;
                            network.SetInputSignals(coordinates);
                            ((FloatFastConcurrentNetwork)network).MultipleStepsWithMod(iterations, (int)numberOfAgents);
                            output = network.GetOutputSignal(0);
            #if OUTPUT
                            foreach (float d in coordinates)
                                sw.Write(d + " ");
                            sw.Write(output);
                            sw.WriteLine();
            #endif
                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, (agent*hiddenPerAgent) + source + inputCount + outputCount, ((outputsPerAgent * agent) + ((target) / 2)) + inputCount, weight));
                            }
                            coordinates[1] = -1;
                            coordinates[3] = 0;

                        }
                    }
                }
            }
            #if OUTPUT
            sw.Flush();
            #endif
            //Console.WriteLine(count);
            //Console.ReadLine();
            return new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
        }
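
A brief illustration (assumed helper names, not from the original source) of the neuron-id layout that generateMultiGenomeModulus relies on: the substrate neuron list is ordered inputs, then outputs, then hidden nodes, and each agent owns a contiguous block inside each region, which is why the offsets agent * inputsPerAgent, outputsPerAgent * agent and agent * hiddenPerAgent appear in the ConnectionGene constructors above.

        // Sketch: global substrate ids for agent-local node indices, assuming the
        // input|output|hidden ordering used by the generators in this listing.
        private static uint InputNeuronId(uint agent, uint localIndex, uint inputsPerAgent)
        {
            return (agent * inputsPerAgent) + localIndex;
        }

        private static uint OutputNeuronId(uint agent, uint localIndex, uint outputsPerAgent, uint inputCount)
        {
            return inputCount + (agent * outputsPerAgent) + localIndex;
        }

        private static uint HiddenNeuronId(uint agent, uint localIndex, uint hiddenPerAgent, uint inputCount, uint outputCount)
        {
            return inputCount + outputCount + (agent * hiddenPerAgent) + localIndex;
        }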
    public override NeatGenome generateGenome(INetwork network)
    {
        // copy the neuron list to a new list and update the x/y values
        NeuronGeneList newNeurons = new NeuronGeneList(neurons);

        // set the x and y value of the SUPGs
        foreach (NeuronGene neuron in newNeurons)
        {
            /*if (neuron.NeuronType == NeuronType.Hidden)
             * {*/
            // switch to grid substrate configuration
            Point point = GetCustomPos(neuron.InnovationId);
            neuron.XValue = point.X;
            neuron.YValue = point.Y;
            if (neuron.NeuronType != NeuronType.Input)
            {
                neuron.ActivationFunction = new SteepenedSigmoid();
            }

            /*neuron.TimeConstant = 1;
             * neuron.NeuronBias = 0;*/
            /*neuron.XValue = getXPos3(neuron.InnovationId);
            *  neuron.YValue = getYPos3(neuron.InnovationId);*/
            //}
        }

        ConnectionGeneList connections = new ConnectionGeneList((int)((inputCount * hiddenCount) + (hiddenCount * outputCount)));

        float[] coordinates = new float[5];
        float   output;
        uint    connectionCounter = 0;
        int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

        // connect hidden layer to outputs

        /*for (uint source = 0; source < hiddenCount; source++)
         * {
         *  coordinates[0] = getXPos(source, false);
         *  coordinates[1] = getYPos(source, false);
         *
         *  for (uint target = 0; target < outputCount; target++)
         *  {
         *      // only connect hidden nodes to their single nearest output
         *      if (source == target)
         *      {
         *          coordinates[2] = getXPos(target, true);
         *          coordinates[3] = getYPos(target, true);
         *
         *          // GWM - fixing weight to 1 for SUPG producing motor outputs
         *          connections.Add(new ConnectionGene(connectionCounter++, source + inputCount + outputCount, target + inputCount, 1));
         *      }
         *  }
         * }*/

        // Connections from input to hidden
        for (uint source = 0; source < inputCount; source++)
        {
            //connections.Add(new ConnectionGene(connectionCounter++, source, 6, getActivation(network,source,6,newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, source, 9, getActivation(network, source, 9, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, source, 10, getActivation(network, source, 10, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, source, 11, getActivation(network, source, 11, newNeurons)));
        }
        // Connection from input to output
        connections.Add(new ConnectionGene(connectionCounter++, 0, 6, getActivation(network, 0, 6, newNeurons)));
        connections.Add(new ConnectionGene(connectionCounter++, 3, 6, getActivation(network, 3, 6, newNeurons)));
        // Connections from hidden to hidden
        for (uint source = 0; source < hiddenCount - 2; source++)
        {
            uint tmpSource = source + inputCount + outputCount;
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 12, getActivation(network, tmpSource, 12, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 13, getActivation(network, tmpSource, 13, newNeurons)));
        }
        // Connections from hidden to output
        for (uint source = 0; source < 2; source++)
        {
            uint tmpSource = source + inputCount + outputCount + 3;
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 4, getActivation(network, tmpSource, 4, newNeurons)));
            connections.Add(new ConnectionGene(connectionCounter++, tmpSource, 5, getActivation(network, tmpSource, 5, newNeurons)));
        }

        return(new SharpNeatLib.NeatGenome.NeatGenome(0, newNeurons, connections, (int)inputCount, (int)outputCount));
    }
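
The generateGenome override above relies on a getActivation helper that is not included in this listing. The sketch below is only a guess at its shape, modeled on the other generators here: it looks up the substrate coordinates of the two neurons, queries the CPPN at those coordinates, and returns the first output scaled by a weight range. The name getActivationSketch and the explicit weightRange parameter are assumptions, not the original code; the original also allocates five CPPN inputs, so the fifth is left at its default here.

    // Hypothetical sketch of getActivation: query the CPPN at the positions of the two
    // neurons (taken from the supplied list) and scale its first output into a weight.
    private float getActivationSketch(INetwork network, uint sourceId, uint targetId,
                                      NeuronGeneList neuronList, float weightRange)
    {
        float[] coordinates = new float[5]; // index 4 left at 0, matching the 5-input allocation above
        foreach (NeuronGene neuron in neuronList)
        {
            if (neuron.InnovationId == sourceId) { coordinates[0] = neuron.XValue; coordinates[1] = neuron.YValue; }
            if (neuron.InnovationId == targetId) { coordinates[2] = neuron.XValue; coordinates[3] = neuron.YValue; }
        }

        int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
        network.ClearSignals();
        network.SetInputSignals(coordinates);
        network.MultipleSteps(iterations);
        return network.GetOutputSignal(0) * weightRange;
    }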
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            float[] coordinates = new float[4];
            float   output;
            uint    connectionCounter = 0;
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount  = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];


            uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {
                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                                      //Input

                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                    //Output

                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                            case 0: targetID = connectedNG.GlobalID + targetCout; break;

                            case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;

                            case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //calculate bias of target node
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID]    = true;
                            }

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            //network.MultipleSteps(iterations);
                            output = network.GetOutputSignal(0);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return(gn);
        }
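
A small sketch (not in the original source) isolating the per-target bias query used in generateHomogeneousGenome: the CPPN is evaluated with the source fixed at the origin and the target at the node's substrate position, and its second output (index 1), scaled by weightRange, becomes the node's bias. QueryBias is an assumed name.

        // Sketch: the bias query performed once per target node (guarded by biasCalculated above).
        private static float QueryBias(INetwork network, PointF target, double weightRange)
        {
            float[] coordinates = { 0.0f, 0.0f, target.X, target.Y };
            network.ClearSignals();
            network.SetInputSignals(coordinates);
            ((ModularNetwork)network).RecursiveActivation();
            return (float)(network.GetOutputSignal(1) * weightRange);
        }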
        public NeatGenome GenerateGenome(INetwork network)
        {
            int maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            // TODO:
            maxIterations = Math.Min(maxIterations, 4);

            double epsilon     = 0.0;
            double threshold   = HyperNEATParameters.threshold;
            double weightRange = HyperNEATParameters.weightRange;

            // store constant ids for later references

            uint biasid       = 0u;
            uint inputsStart  = m_useBias ? 1u : 0u;
            uint inputsEnd    = (uint)(inputsStart + m_inputsCount);
            uint outputsStart = inputsEnd;
            uint outputsEnd   = (uint)(outputsStart + m_outputsCount);

            float[]            coordinates = new float[4];
            float              output;
            uint               connectionCounter = 0;
            ConnectionGeneList connections       = new ConnectionGeneList();

            if (m_useBias)
            {
                // we use the bias neuron at the center, and use the
                // 2nd output of the CPPN to compute its weight
                coordinates[0] = 0.0f;
                coordinates[1] = 0.0f;

                // add the bias link to all output neurons
                for (int ni = 0; ni < m_outputsCount; ni++)
                {
                    float tempX, tempY;
                    GetOutputNodeCoordinate(ni, out tempX, out tempY);
                    coordinates[2] = tempX;
                    coordinates[3] = tempY;

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex + 1);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++, biasid, (uint)(outputsStart + ni), weight));
                    }
                }
            }

            // now add possible connections between all input-output neuron pairs

            for (int inpi = 0; inpi < m_inputsCount; inpi++)
            {
                float inpx, inpy;
                GetInputNodeCoordinate(inpi, out inpx, out inpy);
                coordinates[0] = inpx;
                coordinates[1] = inpy;

                for (int outi = 0; outi < m_outputsCount; outi++)
                {
                    float outx, outy;
                    GetOutputNodeCoordinate(outi, out outx, out outy);

                    coordinates[2] = outx;
                    coordinates[3] = outy;

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(m_cppnOutIndex);

                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++,
                                                           (uint)(inputsStart + inpi), (uint)(outputsStart + outi), weight));
                    }
                }
            }

            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_inputsCount, m_outputsCount));
        }
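
GetInputNodeCoordinate and GetOutputNodeCoordinate are not shown in this listing. The sketch below is an assumption about a typical layout, consistent with the delta-based generators above: nodes spaced evenly across x in [-1, 1], inputs on the y = -1 line and outputs on y = 1. The Sketch suffix marks the names as hypothetical; m_inputsCount and m_outputsCount are the counts already used by GenerateGenome.

        // Hypothetical node-placement sketch.
        private void GetInputNodeCoordinateSketch(int index, out float x, out float y)
        {
            float delta = 2.0f / m_inputsCount;
            x = -1.0f + delta / 2.0f + index * delta;   // evenly spaced centers across [-1, 1]
            y = -1.0f;                                  // input row
        }

        private void GetOutputNodeCoordinateSketch(int index, out float x, out float y)
        {
            float delta = 2.0f / m_outputsCount;
            x = -1.0f + delta / 2.0f + index * delta;
            y = 1.0f;                                   // output row
        }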
        public NeatGenome.NeatGenome generateGenomeStackSituationalPolicy(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, float signal)
        {
            // Schrum: For debugging
            //Console.WriteLine("generateGenomeStackSituationalPolicy:signal=" + signal);
            //Console.WriteLine("CPPN inputs = " + network.InputNeuronCount);

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            // Schrum: Too many inputs: Only store those that are needed
            //float[] coordinates = new float[5 + 1]; // <-- Schrum: bit sloppy: frequently results in unused CPPN inputs. Should make more precise
            float[] coordinates = new float[network.InputNeuronCount]; // Schrum: CPPN tracks how many inputs it needs
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            // Schrum: If we are inside this function, then we either have a heterogeneous team
            //         of agents or a single agent (not sure why that ended up being the case; odd use of homogeneousTeam).
            //         Therefore, numberOfAgents tells us whether we need to save space for a Z-coordinate,
            //         and whether we are expecting a Situation input.
            if (numberOfAgents == 1 && coordinates.Length > 4)
            {
                coordinates[4] = signal; // No Z coord, but save situation
            }
            else if (coordinates.Length > 5)
            {
                coordinates[5] = signal; // Both Z coord and situation
            }
            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                // Schrum: Only include Z-coord as input if there are multiple team members
                if (numberOfAgents > 1)
                {
                    coordinates[4] = stackCoordinate; // Schrum: z-coord will always be at index 4
                }
                // Schrum: Debug
                //Console.WriteLine("CPPN inputs (first 4 blank): " + string.Join(",", coordinates));


                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;


                        foreach (PointF source in ng.NeuronPositions)
                        {
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                            //Input

                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                         //Output

                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;      //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //--- bias
                                //-----------------Get the bias of the target node
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }
                                //--bias



                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                // Schrum: Debug
                                //Console.WriteLine("CPPN inputs: " + string.Join(",", coordinates));

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                // Schrum: Observation: It seems impossible to use both LEO and adaptive networks because of these hardcoded magic numbers
                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                // Schrum: Observation: In long run, might be desirable to use LEO, but incompatible with special preference neuron output
                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                // Schrum: This is a horrible hack, but it gets the job done for now.
                                // The reason this works is that it makes the following assumptions that could easily be broken in the future:
                                // 1) It is assumed that the only reason a CPPN would have 3 outputs per policy is if the third is for preference links
                                // 2) It is assumed that in a substrate with a preference neuron, the y-coord will always be 0.8, and no other neuron will have
                                //    that y-coord.
                                //Console.WriteLine("output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                //Console.WriteLine("network.OutputsPerPolicy == 3" + (network.OutputsPerPolicy == 3));
                                //Console.WriteLine("target.Y == 0.8" + (target.Y == 0.8f));
                                if (network.OutputsPerPolicy == 3 && target.Y == 0.8f)
                                {
                                    // The output from the link for the preference neuron replaces the standard output.
                                    // Because the link weight is defined by a totally different CPPN output, the preference
                                    // neuron is more free to behave very differently.
                                    output = network.GetOutputSignal(2);
                                    //Console.WriteLine("Preference output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight =
                                            (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) *
                                                    weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive networkset weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight,
                                                                           ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: Debugging
            // Looking at the control networks has revealed that the order of details in the substrate
            // description is important. The layer with the preference neuron has to be defined last
            // if it is to be the final neuron in the linearly organized output layer.
            //XmlDocument doc = new XmlDocument();
            //SharpNeatLib.NeatGenome.Xml.XmlGenomeWriterStatic.Write(doc, sng);
            //System.IO.FileInfo oFileInfo = new System.IO.FileInfo("temp.xml");
            //doc.Save(oFileInfo.FullName);

            return(sng);
        }
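
An assumed caller-side sketch, not from the original source: one plausible way to build the stackCoordinates list that generateGenomeStackSituationalPolicy consumes. It mirrors the agentDelta = 2 / (numberOfAgents - 1) spacing computed inside the method, placing each agent's z-coordinate evenly over [-1, 1]. BuildStackCoordinates is a hypothetical name.

        // Sketch: evenly spaced z-coordinates, one per agent.
        private static List<float> BuildStackCoordinates(int numberOfAgents)
        {
            List<float> stack = new List<float>();
            if (numberOfAgents <= 1)
            {
                stack.Add(0.0f); // single agent: no z spread needed
                return stack;
            }
            float agentDelta = 2.0f / (numberOfAgents - 1);
            for (int agent = 0; agent < numberOfAgents; agent++)
            {
                stack.Add(-1.0f + agent * agentDelta);
            }
            return stack;
        }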
        /// <summary>
        ///     TODO
        /// </summary>
        /// <param name="birthGeneration">
        ///     The current evolution algorithm generation.
        ///     Assigned to the new genome as its birth generation.
        /// </param>
        public override NeatGenome CreateGenome(uint birthGeneration)
        {
            NeuronGeneList neuronGeneList = new NeuronGeneList(_inputNeuronCount + _outputNeuronCount);
            NeuronGeneList inputNeuronGeneList = new NeuronGeneList(_inputNeuronCount); // includes single bias neuron.
            NeuronGeneList outputNeuronGeneList = new NeuronGeneList(_outputNeuronCount);

            // Create a single bias neuron.
            uint biasNeuronId = _innovationIdGenerator.NextId;
            if (0 != biasNeuronId)
            {   // The ID generator must be reset before calling this method so that all generated genomes use the
                // same innovation ID for matching neurons and structures.
                throw new SharpNeatException("IdGenerator must be reset before calling CreateGenome(uint)");
            }

            // Note. Genes within nGeneList must always be arranged according to the following layout plan.
            //   Bias - single neuron. Innovation ID = 0
            //   Input neurons.
            //   Output neurons.
            //   Hidden neurons.
            NeuronGene neuronGene = CreateNeuronGene(biasNeuronId, NodeType.Bias);
            inputNeuronGeneList.Add(neuronGene);
            neuronGeneList.Add(neuronGene);

            // Create input neuron genes.
            for (int i = 0; i < _inputNeuronCount; i++)
            {
                neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Input);
                inputNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            // Create output neuron genes.
            for (int i = 0; i < _outputNeuronCount; i++)
            {
                neuronGene = CreateNeuronGene(_innovationIdGenerator.NextId, NodeType.Output);
                outputNeuronGeneList.Add(neuronGene);
                neuronGeneList.Add(neuronGene);
            }

            // Define all possible connections between the input and output neurons (fully interconnected).
            int srcCount = inputNeuronGeneList.Count;
            int tgtCount = outputNeuronGeneList.Count;
            ConnectionDefinition[] connectionDefArr = new ConnectionDefinition[srcCount * tgtCount];

            for (int srcIdx = 0, i = 0; srcIdx < srcCount; srcIdx++)
            {
                for (int tgtIdx = 0; tgtIdx < tgtCount; tgtIdx++)
                {
                    connectionDefArr[i++] = new ConnectionDefinition(_innovationIdGenerator.NextId, srcIdx, tgtIdx);
                }
            }

            // Shuffle the array of possible connections.
            Utilities.Shuffle(connectionDefArr, _rng);

            // Select connection definitions from the head of the list and convert them to real connections.
            // We want some proportion of all possible connections but at least one (Connectionless genomes are not allowed).
            int connectionCount = (int)Utilities.ProbabilisticRound(
                (double)connectionDefArr.Length * _neatGenomeParamsComplexifying.InitialInterconnectionsProportion,
                _rng);
            connectionCount = Math.Max(1, connectionCount);

            // Create the connection gene list and populate it.
            ConnectionGeneList connectionGeneList = new ConnectionGeneList(connectionCount);

            #region Add connections to bias toward short connections
            NeuronGene srcNeuronGeneACBias = inputNeuronGeneList[0];
            if (!srcNeuronGeneACBias.TargetNeurons.Contains(outputNeuronGeneList[2].InnovationId))
            {
                NeuronGene tgtNeuronGeneAC = outputNeuronGeneList[2];
                ConnectionGene biasGene = new ConnectionGene(_innovationIdGenerator.NextId,
                                                            srcNeuronGeneACBias.InnovationId,
                                                            tgtNeuronGeneAC.InnovationId,
                                                            Math.Abs(GenerateRandomConnectionWeight()));
                connectionGeneList.Add(biasGene);

                // Register connection with endpoint neurons.
                srcNeuronGeneACBias.TargetNeurons.Add(biasGene.TargetNodeId);
                tgtNeuronGeneAC.SourceNeurons.Add(biasGene.SourceNodeId);
            }
            double conW = GenerateRandomConnectionWeight();
            for (int i = 5; i <= 6; i++)
            {
                NeuronGene srcNeuronGeneAC = inputNeuronGeneList[i];
                if (!srcNeuronGeneAC.TargetNeurons.Contains(outputNeuronGeneList[2].InnovationId))
                {
                    NeuronGene tgtNeuronGeneAC = outputNeuronGeneList[2];
                    ConnectionGene biasGene = new ConnectionGene(_innovationIdGenerator.NextId,
                                                                srcNeuronGeneAC.InnovationId,
                                                                tgtNeuronGeneAC.InnovationId,
                                                                -Math.Abs(conW));
                    connectionGeneList.Add(biasGene);

                    // Register connection with endpoint neurons.
                    srcNeuronGeneAC.TargetNeurons.Add(biasGene.TargetNodeId);
                    tgtNeuronGeneAC.SourceNeurons.Add(biasGene.SourceNodeId);
                }

                srcNeuronGeneAC = inputNeuronGeneList[i];
                if (!srcNeuronGeneAC.TargetNeurons.Contains(outputNeuronGeneList[5].InnovationId))
                {
                    NeuronGene tgtNeuronGeneAC = outputNeuronGeneList[5];
                    ConnectionGene biasGene = new ConnectionGene(_innovationIdGenerator.NextId,
                                                                srcNeuronGeneAC.InnovationId,
                                                                tgtNeuronGeneAC.InnovationId,
                                                                -Math.Abs(conW));
                    connectionGeneList.Add(biasGene);

                    // Register connection with endpoint neurons.
                    srcNeuronGeneAC.TargetNeurons.Add(biasGene.TargetNodeId);
                    tgtNeuronGeneAC.SourceNeurons.Add(biasGene.SourceNodeId);
                }
            }
            #endregion

            #region Add connections to bias connection strength based on distance
            for (int i = 5; i <= 6; i++)
            {
                NeuronGene srcNeuronGeneAC = inputNeuronGeneList[i];
                if (!srcNeuronGeneAC.TargetNeurons.Contains(outputNeuronGeneList[0].InnovationId))
                {
                    NeuronGene tgtNeuronGeneAC = outputNeuronGeneList[0];
                    ConnectionGene biasGene = new ConnectionGene(_innovationIdGenerator.NextId,
                                                                srcNeuronGeneAC.InnovationId,
                                                                tgtNeuronGeneAC.InnovationId,
                                                                Math.Abs(GenerateRandomConnectionWeight()));
                    connectionGeneList.Add(biasGene);

                    // Register connection with endpoint neurons.
                    srcNeuronGeneAC.TargetNeurons.Add(biasGene.TargetNodeId);
                    tgtNeuronGeneAC.SourceNeurons.Add(biasGene.SourceNodeId);
                }
            }
            #endregion
            for (int i = 0; i < connectionCount; i++)
            {
                ConnectionDefinition def = connectionDefArr[i];
                NeuronGene srcNeuronGene = inputNeuronGeneList[def._sourceNeuronIdx];
                NeuronGene tgtNeuronGene = outputNeuronGeneList[def._targetNeuronIdx];

                ConnectionGene cGene = new ConnectionGene(def._innovationId,
                                                        srcNeuronGene.InnovationId,
                                                        tgtNeuronGene.InnovationId,
                                                        GenerateRandomConnectionWeight());
                if (!srcNeuronGene.TargetNeurons.Contains(cGene.TargetNodeId))
                {
                    connectionGeneList.Add(cGene);

                    // Register connection with endpoint neurons.
                    srcNeuronGene.TargetNeurons.Add(cGene.TargetNodeId);
                    tgtNeuronGene.SourceNeurons.Add(cGene.SourceNodeId);
                }
            }

            // Ensure connections are sorted.
            connectionGeneList.SortByInnovationId();

            // Create and return the completed genome object.
            return CreateGenome(_genomeIdGenerator.NextId, birthGeneration,
                                neuronGeneList, connectionGeneList,
                                _inputNeuronCount, _outputNeuronCount, false);
        }
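
A sketch of the rounding step used above, not the library code itself: Utilities.ProbabilisticRound typically keeps the integer part of the requested connection count and rounds up with probability equal to the fractional part, so the expected number of initial connections matches the configured proportion. A plain System.Random stands in for the framework's RNG here, and ProbabilisticRoundSketch is an illustrative name.

        // Sketch of probabilistic rounding.
        private static int ProbabilisticRoundSketch(double value, Random rng)
        {
            int floor = (int)Math.Floor(value);
            double fraction = value - floor;
            return rng.NextDouble() < fraction ? floor + 1 : floor;
        }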
Example No. 45
        //public static

        public static NeatGenome Read(XmlElement xmlGenome)
        {
            int inputNeuronCount  = 0;
            int outputNeuronCount = 0;

            uint id = uint.Parse(XmlUtilities.GetAttributeValue(xmlGenome, "id"));

            //--- Read neuron genes into a list.
            NeuronGeneList neuronGeneList  = new NeuronGeneList();
            XmlNodeList    listNeuronGenes = xmlGenome.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuronGene in listNeuronGenes)
            {
                NeuronGene neuronGene = ReadNeuronGene(xmlNeuronGene);

                // Count the input and output neurons as we go.
                switch (neuronGene.NeuronType)
                {
                case NeuronType.Input:
                    inputNeuronCount++;
                    break;

                case NeuronType.Output:
                    outputNeuronCount++;
                    break;
                }

                neuronGeneList.Add(neuronGene);
            }

            //--- Read module genes into a list.
            List <ModuleGene> moduleGeneList  = new List <ModuleGene>();
            XmlNodeList       listModuleGenes = xmlGenome.SelectNodes("modules/module");

            foreach (XmlElement xmlModuleGene in listModuleGenes)
            {
                moduleGeneList.Add(ReadModuleGene(xmlModuleGene));
            }

            //--- Read connection genes into a list.
            ConnectionGeneList connectionGeneList  = new ConnectionGeneList();
            XmlNodeList        listConnectionGenes = xmlGenome.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnectionGene in listConnectionGenes)
            {
                connectionGeneList.Add(ReadConnectionGene(xmlConnectionGene));
            }


            // Read behavior list
            NeatGenome g = new NeatGenome(id, neuronGeneList, moduleGeneList, connectionGeneList, inputNeuronCount, outputNeuronCount);

            XmlNode behaviorNode = xmlGenome.SelectSingleNode("behavior");

            if (behaviorNode != null)
            {
                g.Behavior            = ReadBehavior(behaviorNode); //TODO bug is here
                g.Behavior.objectives = new double[6];
                g.objectives          = new double[6];
            }



            /*
             * XmlNode behaviorNode = xmlGenome.SelectSingleNode("behavior");
             * if (behaviorNode != null)
             * {
             *  g.Behavior = ReadBehavior(behaviorNode.SelectSingleNode("list")); //TODO bug is here
             *  g.Behavior.objectives = new double[6];
             *  g.objectives = new double[6];
             * }
             */



            return(g);
        }
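
A usage sketch for the reader above (the file name and root-element assumption are illustrative, not from the source): load the genome XML into an XmlDocument and pass its root element to Read.

        // Sketch: loading a genome from disk; assumes the genome element is the document root.
        public static NeatGenome LoadGenomeFromFile(string path)
        {
            XmlDocument doc = new XmlDocument();
            doc.Load(path); // e.g. "genome.xml" (hypothetical path)
            return Read(doc.DocumentElement);
        }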
Example No. 46
        public void generateConnections(List <PointF> inputNeuronPositions, List <PointF> outputNeuronPositions,
                                        INetwork genome, float sampleWidth, float sampleThreshold, float neighborLevel,
                                        float increaseResolutionThreshold, float minDistance,
                                        float connectionThreshold,
                                        uint inputCount, uint outputCount,
                                        float minX, float minY, float maxX, float maxY,
                                        ref ConnectionGeneList connections, ref List <PointF> hiddenNeurons, float stackNumber)
        {
            // SubstrateEvolution.stackNumber = stackNumber; //TODO hacky
            //  totalLength = 0.0f;
            List <Rect> rectList = new List <Rect>();

            //minX = -1.0f;
            //minY = -1.0f;
            //maxX = 1.0f;
            //maxY = 1.0f;

            List <ExpressPoint> _connections = new List <ExpressPoint>();

            List <PointF> hiddenPos = new List <PointF>();

            List <PointF> pointToAdd = new List <PointF>();
            float         targetX, targetY;


            double output;
            int    sourceIndex, targetIndex, neuronCount = 0;
            uint   counter = 0;
            float  weight;

            float[] connectionCoordinates = new float[5];

            connectionCoordinates[4] = stackNumber;

            //CONNECTION DIRECTLY FROM INPUT NODES
            List <PointF> tabuList = new List <PointF>();

            foreach (PointF input in inputNeuronPositions)
            {
                Rect startRec = new Rect(input.X, input.Y, true, null, 0, minX, minY, maxX, maxY, genome, stackNumber);
                startRec.createTree(sampleWidth);
                startRec.addPoints(ref _connections, ref sampleThreshold, ref neighborLevel, ref increaseResolutionThreshold, ref minDistance);

                foreach (ExpressPoint p in _connections)
                {
                    targetX = (p.x1 + p.x2) / 2.0f;
                    targetY = (p.y1 + p.y2) / 2.0f;

                    PointF newp = new PointF(targetX, targetY);
                    if (!hiddenPos.Contains(newp))
                    {
                        hiddenPos.Add(newp);
                        tabuList.Add(newp);
                    }
                }
            }
            foreach (ExpressPoint t in _connections)
            {
                connectionCoordinates[0] = t.fixedx;
                connectionCoordinates[1] = t.fixedy;
                connectionCoordinates[2] = (float)(t.x1 + t.x2) / 2.0f;
                connectionCoordinates[3] = (float)(t.y1 + t.y2) / 2.0f;

                //        double dist = Math.Sqrt(Math.Pow(connectionCoordinates[0] - connectionCoordinates[2], 2) +
                //Math.Pow(connectionCoordinates[1] - connectionCoordinates[3], 2));

                if (float.IsNaN(t.activationLevel))
                {
                    Console.WriteLine("Normally this shouldn't happen");
                    return;
                    //
                }
                else
                {
                    output = t.activationLevel;
                }

                //!remove
                //recurrent = (connectionCoordinates[0] == connectionCoordinates[2]) && (connectionCoordinates[1] == connectionCoordinates[3]);
                //
                //   if ((Math.Abs(output) > connectionThreshold)) //&& (pcount % 10 ==0))
                //  {
                PointF source = new PointF(connectionCoordinates[0], connectionCoordinates[1]);
                PointF target = new PointF(connectionCoordinates[2], connectionCoordinates[3]);


                //connectionList.Add(new Connection(x1, y1, x2, y2));

                sourceIndex = inputNeuronPositions.IndexOf(source); //TODO change. computationally expensive
                if (sourceIndex == -1)                              //!hiddenNeurons.Contains(source))
                {
                    Console.WriteLine("This shouldn't happen.");
                    sourceIndex = inputNeuronPositions.Count;
                    // hiddenNeurons.Add(source);
                    //  neuronCount++;
                }

                targetIndex = hiddenNeurons.IndexOf(target);
                if (targetIndex == -1) //!hiddenNeurons.Contains(target)) hiddenNeurons.Add(target);
                {
                    targetIndex = hiddenNeurons.Count;
                    hiddenNeurons.Add(target);
                    neuronCount++;
                }

                weight = (float)(((Math.Abs(output) - (connectionThreshold)) / (1 - connectionThreshold)) * WEIGHT_RANGE * Math.Sign(output));
                //if (weight > 0.0) weight = 1.0f;
                //else weight = -1.0f;

                connections.Add(new ConnectionGene(counter++, (uint)(sourceIndex), (uint)(targetIndex + inputCount + outputCount), weight, ref connectionCoordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                //   }
            }
            //return;//! remove
            _connections.Clear();
            //**************************************
            //Hidden to Hidden
            for (int step = 0; step < MAX_ITER_STEPS; step++)
            {
                pointToAdd.Clear();
                foreach (PointF hiddenP in hiddenPos)
                {
                    Rect startRec = new Rect(hiddenP.X, hiddenP.Y, true, null, 0, minX, minY, maxX, maxY, genome, stackNumber);
                    startRec.createTree(sampleWidth);
                    startRec.addPoints(ref _connections, ref sampleThreshold, ref neighborLevel, ref increaseResolutionThreshold, ref minDistance);

                    foreach (ExpressPoint p in _connections)
                    {
                        //     double dist = Math.Sqrt(Math.Pow(p.x1 - p.x2, 2) + Math.Pow(p.y1 - p.y2, 2));

                        // connectionThreshold*

                        //  if (Math.Abs(p.activationLevel) > connectionThreshold)//(dist / 2.5f + 0.5))
                        //  {
                        targetX = (p.x1 + p.x2) / 2.0f;
                        targetY = (p.y1 + p.y2) / 2.0f;
                        PointF newp = new PointF(targetX, targetY);
                        if (!tabuList.Contains(newp))
                        {
                            pointToAdd.Add(newp);
                            tabuList.Add(newp);
                        }
                        //  }
                        //  if (targetY>input.Y)
                        //
                    }
                }
                hiddenPos.Clear();
                if (pointToAdd.Count == 0)
                {
                    break;
                }
                hiddenPos.AddRange(pointToAdd);
            }


            foreach (ExpressPoint t in _connections)
            {
                connectionCoordinates[0] = t.fixedx;
                connectionCoordinates[1] = t.fixedy;
                connectionCoordinates[2] = (float)(t.x1 + t.x2) / 2.0f;
                connectionCoordinates[3] = (float)(t.y1 + t.y2) / 2.0f;

                //         double dist = Math.Sqrt(Math.Pow(connectionCoordinates[0] - connectionCoordinates[2], 2) +
                // Math.Pow(connectionCoordinates[1] - connectionCoordinates[3], 2));

                if (float.IsNaN(t.activationLevel))
                {
                    Console.WriteLine("Normally this shouldn't happen");
                    return;
                    //
                }
                else
                {
                    output = t.activationLevel;
                }

                //  if ((Math.Abs(output) > connectionThreshold)) //&& (pcount % 10 ==0))
                // {
                PointF source = new PointF(connectionCoordinates[0], connectionCoordinates[1]);
                PointF target = new PointF(connectionCoordinates[2], connectionCoordinates[3]);
                //connectionList.Add(new Connection(x1, y1, x2, y2));

                sourceIndex = hiddenNeurons.IndexOf(source); //TODO change. computationally expensive
                if (sourceIndex == -1)                       //!hiddenNeurons.Contains(source))
                {
                    sourceIndex = hiddenNeurons.Count;
                    hiddenNeurons.Add(source);
                    neuronCount++;
                }

                targetIndex = hiddenNeurons.IndexOf(target);
                if (targetIndex == -1) //!hiddenNeurons.Contains(target)) hiddenNeurons.Add(target);
                {
                    targetIndex = hiddenNeurons.Count;
                    hiddenNeurons.Add(target);
                    neuronCount++;
                }

                weight = (float)(((Math.Abs(output) - (connectionThreshold)) / (1 - connectionThreshold)) * WEIGHT_RANGE * Math.Sign(output));
                //if (weight > 0.0) weight = 1.0f;
                //else weight = -1.0f;

                connections.Add(new ConnectionGene(counter++, (uint)(sourceIndex + inputCount + outputCount), (uint)(targetIndex + inputCount + outputCount), weight, ref connectionCoordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                //   }
            }

            _connections.Clear();

            //CONNECT TO OUTPUT
            foreach (PointF outputPos in outputNeuronPositions)
            {
                Rect startRec = new Rect(outputPos.X, outputPos.Y, false, null, 0, minX, minY, maxX, maxY, genome, stackNumber);
                startRec.createTree(sampleWidth);
                startRec.addPoints(ref _connections, ref sampleThreshold, ref neighborLevel, ref increaseResolutionThreshold, ref minDistance);

                //foreach (ExpressPoint p in _connections)
                //{
                //    targetX = (p.x1 + p.x2) / 2.0f;
                //    targetY = (p.y1 + p.y2) / 2.0f;
                //    //  if (targetY>input.Y)
                //    inputPos.Add(new PointF(targetX, targetY));
                //}
            }
            //GO DEEPER
            foreach (ExpressPoint t in _connections)
            {
                connectionCoordinates[0] = (float)(t.x1 + t.x2) / 2.0f;
                connectionCoordinates[1] = (float)(t.y1 + t.y2) / 2.0f;
                connectionCoordinates[2] = t.fixedx;
                connectionCoordinates[3] = t.fixedy;

                //          double dist = Math.Sqrt(Math.Pow(connectionCoordinates[0] - connectionCoordinates[2], 2) +
                // Math.Pow(connectionCoordinates[1] - connectionCoordinates[3], 2));

                if (float.IsNaN(t.activationLevel))
                {
                    Console.WriteLine("Normally this shouldn't happen");
                    return;
                    //
                }
                else
                {
                    output = t.activationLevel;
                }

                //!remove
                //recurrent = (connectionCoordinates[0] == connectionCoordinates[2]) && (connectionCoordinates[1] == connectionCoordinates[3]);
                //
                //    if ((Math.Abs(output) > connectionThreshold)) //&& (pcount % 10 ==0))
                //   {
                PointF source = new PointF(connectionCoordinates[0], connectionCoordinates[1]);
                PointF target = new PointF(connectionCoordinates[2], connectionCoordinates[3]);
                //connectionList.Add(new Connection(x1, y1, x2, y2));

                sourceIndex = hiddenNeurons.IndexOf(source);            //TODO change. computationally expensive

                //only connect if the source hidden neuron already exists; otherwise this
                //candidate connection is skipped
                if (sourceIndex != -1)
                {

                    targetIndex = outputNeuronPositions.IndexOf(target);
                    if (targetIndex == -1) //!hiddenNeurons.Contains(target)) hiddenNeurons.Add(target);
                    {
                        Console.WriteLine("SubstrateEvolution: This shouldn't happen");
                        //targetIndex = hiddenNeurons.Count;
                        //hiddenNeurons.Add(target);
                        //neuronCount++;
                    }

                    weight = (float)(((Math.Abs(output) - (connectionThreshold)) / (1 - connectionThreshold)) * WEIGHT_RANGE * Math.Sign(output));
                    //if (weight > 0.0) weight = 1.0f;
                    //else weight = -1.0f;

                    connections.Add(new ConnectionGene(counter++, (uint)(sourceIndex + inputCount + outputCount), (uint)(targetIndex + inputCount), weight, ref connectionCoordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
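                    // targetIndex + inputCount maps the output-neuron index into the genome's
                    // input|output|hidden ID space (outputs start right after the inputs).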
                }
                // }
            }
        }
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
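            // iterations would bound a MultipleSteps relaxation of a recurrent CPPN; this method
            // activates the CPPN with RecursiveActivation below, so the value is only referenced
            // by the commented-out MultipleSteps call.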

            uint totalOutputCount = OutputCount;
            uint totalInputCount = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint sourceCount, targetCount;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];
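            // Tracks which target neurons have already had their bias queried from the CPPN,
            // so each bias is computed at most once even if the neuron is targeted repeatedly.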

            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {

                        targetCount = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
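                            // Global neuron IDs follow the input|output|hidden layout: output groups
                            // are offset by totalInputCount, hidden groups by totalInputCount + totalOutputCount.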
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                                case 0: targetID = connectedNG.GlobalID + targetCount; break;                                //Input
                                case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCount; break;              //Output
                                case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCount; break; //Hidden
                            }

                            //calculate bias of target node
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;
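                                // Querying from the origin (0,0) to the target position lets CPPN
                                // output 1 serve as a per-neuron bias, scaled by weightRange below.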

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID] = true;
                            }

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            //network.MultipleSteps(iterations);
                            output = network.GetOutputSignal(0);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCount++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return gn;
        }
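
        // A minimal sketch, not part of the original source: the expression-threshold-and-rescale
        // step above is repeated in several of these methods, and a hypothetical helper like this
        // captures that convention. Assumes the same rule as the surrounding code: magnitudes at or
        // below the threshold are not expressed (weight 0); anything above is mapped linearly into
        // [-weightRange, weightRange], keeping the sign of the CPPN output.
        private static float mapOutputToWeight(float output, double threshold, double weightRange)
        {
            if (Math.Abs(output) <= threshold)
                return 0.0f; // below the expression threshold: connection is not expressed
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }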
Example No. 48
0
        public ESBodyInformation genomeIntoBodyObject(IGenome genome, out bool isEmpty)
        {
            INetwork net = GenomeDecoder.DecodeToModularNetwork((NeatGenome)genome);

            isEmpty = false;

            //we want the genome, so we can acknowledge the genomeID!

            //now convert a network to a set of hidden neurons and connections

            //we'll make body specific function calls later
            var allBodyOutputs       = new List <List <float> >();
            var allBodyInputs        = new List <PointPair>();
            var indexToConnectionMap = new Dictionary <int, int>();

            List <PointF> inputs, outputs, hiddenNeurons;

            inputs        = new List <PointF>();
            outputs       = new List <PointF>();
            hiddenNeurons = new List <PointF>();

            //inputs.Add(new PointF(0,0));

            //int initialDepth, ESIterations;
            //uint inputCount, outputCount;
            //float varianceThreshold, bandThreshold;

            ConnectionGeneList connections = new ConnectionGeneList();


            //loop over a grid defined by the resolution and query the CPPN for every candidate connection, using LEO to gate expression


            int resolution = 9;
            //int resolutionHalf = resolution / 2;

            List <PointF> queryPoints    = gridQueryPoints(resolution);
            float         xDistanceThree = dXDistance(resolution, 3.0f);
            float         yDistanceThree = dYDistance(resolution, 3.0f);
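
            // gridQueryPoints presumably lays out a resolution x resolution grid of candidate body
            // points; the two distances above bound candidate connections to roughly three grid
            // cells in x and y.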


            bool useLeo = true;

            int counter = 0;
            Dictionary <long, PointF> conSourcePoints = new Dictionary <long, PointF>();
            Dictionary <long, PointF> conTargetPoints = new Dictionary <long, PointF>();


            //Dictionary<string, List<PointF>> pointsChecked = new Dictionary<string, List<PointF>>();
            //List<PointF> pList;
            int src, tgt;

            //for each point in the grid
            for (int p1 = 0; p1 < queryPoints.Count; p1++)
            {
                PointF xyPoint = queryPoints[p1];

                //query against all other points (optionally limiting connection length)
                for (int p2 = p1; p2 < queryPoints.Count; p2++)
                {
                    PointF otherPoint = queryPoints[p2];

                    if (p1 != p2 && (Math.Abs(xyPoint.X - otherPoint.X) < xDistanceThree && Math.Abs(xyPoint.Y - otherPoint.Y) < yDistanceThree))
                    {
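                        // Only distinct pairs within the three-cell neighbourhood are queried;
                        // starting the inner loop at p1 avoids checking the same unordered pair twice.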
                        //if(!pointsChecked.TryGetValue(xyPoint.ToString(), out pList))
                        //{
                        //    pList = new List<PointF>();
                        //    pointsChecked.Add(xyPoint.ToString(), pList);
                        //}
                        //pList.Add(otherPoint);

                        //if (!pointsChecked.TryGetValue(otherPoint.ToString(), out pList))
                        //{
                        //    pList = new List<PointF>();
                        //    pointsChecked.Add(otherPoint.ToString(), pList);
                        //}
                        //pList.Add(xyPoint);

                        //Console.WriteLine("Checking: ({0}, {1}) => ({2}, {3}) ", xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y);

                        float[] outs   = queryCPPNOutputs((ModularNetwork)net, xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y, maxXDistanceCenter(xyPoint, otherPoint), minYDistanceGround(xyPoint, otherPoint));
                        float   weight = outs[0];
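                        // The CPPN is queried with both endpoints plus two derived geometric inputs
                        // (per the helper names, presumably the maximum x-distance from the centre and
                        // the minimum y-distance to the ground); output 0 is the weight, output 1 is
                        // used as the LEO gate below.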

                        allBodyInputs.Add(new PointPair(xyPoint, otherPoint));
                        allBodyOutputs.Add(new List <float>(outs));


                        if (useLeo)
                        {
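                            // LEO (Link Expression Output): the second CPPN output gates whether this
                            // candidate connection is expressed at all.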
                            if (outs[1] > 0)
                            {
                                //Console.WriteLine("XY: " + xyPoint + " Other: " + otherPoint + " LEO : " + outs[1]) ;

                                //Console.WriteLine(" XDist: " + sqrt(xDistanceSq(xyPoint, otherPoint))
                                //    + " yDist : " + sqrt(yDistanceSq(xyPoint, otherPoint))
                                //    + " MaxDist: " + maxXDistanceCenter(xyPoint, otherPoint))
                                //+ " MinY: " + minYDistanceGround(xyPoint, otherPoint));
                                //Console.WriteLine();

                                //add to hidden neurons
                                if (!hiddenNeurons.Contains(xyPoint))
                                {
                                    hiddenNeurons.Add(xyPoint);
                                }

                                src = hiddenNeurons.IndexOf(xyPoint);

                                if (!hiddenNeurons.Contains(otherPoint))
                                {
                                    hiddenNeurons.Add(otherPoint);
                                }

                                tgt = hiddenNeurons.IndexOf(otherPoint);

                                conSourcePoints.Add(counter, xyPoint);
                                conTargetPoints.Add(counter, otherPoint);

                                indexToConnectionMap.Add(allBodyOutputs.Count - 1, counter);
                                connections.Add(new ConnectionGene(counter++, (src), (tgt), weight * HyperNEATParameters.weightRange, new float[] { xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y }, outs));
                            }
                        }
                        else
                        {
                            //add to hidden neurons
                            if (!hiddenNeurons.Contains(xyPoint))
                            {
                                hiddenNeurons.Add(xyPoint);
                            }

                            src = hiddenNeurons.IndexOf(xyPoint);

                            if (!hiddenNeurons.Contains(otherPoint))
                            {
                                hiddenNeurons.Add(otherPoint);
                            }

                            tgt = hiddenNeurons.IndexOf(otherPoint);

                            conSourcePoints.Add(counter, xyPoint);
                            conTargetPoints.Add(counter, otherPoint);

                            indexToConnectionMap.Add(allBodyOutputs.Count - 1, counter);
                            connections.Add(new ConnectionGene(counter++, (src), (tgt), weight * HyperNEATParameters.weightRange, new float[] { xyPoint.X, xyPoint.Y, otherPoint.X, otherPoint.Y }, outs));
                        }


                        //PointF newp = new PointF(p.x2, p.y2);

                        //targetIndex = hiddenNeurons.IndexOf(newp);
                        //if (targetIndex == -1)
                        //{
                        //    targetIndex = hiddenNeurons.Count;
                        //    hiddenNeurons.Add(newp);
                        //}
                        //connections.Add(new ConnectionGene(counter++, (sourceIndex), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                    }
                }
            }



            //esSubstrate.generateSubstrate(inputs, outputs, net,
            //    HyperNEATParameters.initialDepth,
            //    (float)HyperNEATParameters.varianceThreshold,
            //     (float)HyperNEATParameters.bandingThreshold,
            //    HyperNEATParameters.ESIterations,
            //     (float)HyperNEATParameters.divisionThreshold,
            //    HyperNEATParameters.maximumDepth,
            //    (uint)inputs.Count, (uint)outputs.Count,
            //    ref connections, ref hiddenNeurons, true);


            //generateSubstrate(List<System.Drawing.PointF> inputNeuronPositions, List<PointF> outputNeuronPositions,
            //INetwork genome, int initialDepth, float varianceThreshold, float bandThreshold, int ESIterations,
            //                                    float divsionThreshold, int maxDepth,
            //                                    uint inputCount, uint outputCount,
            //                                    ref  ConnectionGeneList connections, ref List<PointF> hiddenNeurons)

            //blow out the object, we don't care about testing it

            //foreach (var pPair in pointsChecked)
            //{
            //    Console.WriteLine("Checking: " + pPair.Key + " processed: ");

            //    foreach (var xyPoint in pPair.Value)
            //    {
            //        Console.WriteLine("({0}, {1}) ", xyPoint.X, xyPoint.Y);
            //    }
            //}

            var beforeConn   = connections.Count;
            var beforeNeuron = hiddenNeurons.Count;

            //var hiddenCopy = new List<PointF>(hiddenNeurons);

            ensureSingleConnectedStructure(connections, hiddenNeurons, conSourcePoints, conTargetPoints);
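            // ensureSingleConnectedStructure presumably prunes neurons and connections that are not
            // part of a single connected component, so the decoded body is one contiguous structure.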

            if (hiddenNeurons.Count > 20 || connections.Count > 100)
            {
                hiddenNeurons = new List <PointF>();
                connections   = new ConnectionGeneList();
            }
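            // Bodies above this size cap (20 hidden neurons / 100 connections) are discarded outright
            // and reported as empty below.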


            if (hiddenNeurons.Count == 0 || connections.Count == 0)
            {
                isEmpty = true;
            }

            NeatGenome ng = (NeatGenome)genome;

            bool behaviorExists = (ng.Behavior != null);

            ESBodyInformation esbody = new ESBodyInformation()
            {
                AllBodyOutputs    = allBodyOutputs,
                AllBodyInputs     = allBodyInputs,
                indexToConnection = indexToConnectionMap,
                //PreHiddenLocations = hiddenCopy,
                BeforeNeuron     = beforeNeuron,
                BeforeConnection = beforeConn,
                GenomeID         = genome.GenomeId,
                Connections      = connections,
                HiddenLocations  = hiddenNeurons,
                InputLocations   = inputs,
                Objectives       = ng.objectives,
                Fitness          = ng.Fitness,
                Locality         = ng.locality,
                useLEO           = useLeo
            };

            Console.WriteLine(" Nodes: " + hiddenNeurons.Count + " Connections: " + connections.Count);

            return(esbody);
        }