public FieldSubs3LQTable(RLClientBase client, int numTeammates, int myUnum)
            : base(client, numTeammates, myUnum, false)
        {
            if (Program.LoadingGenome)
            {
                try
                {
                    var doc = new XmlDocument();
                    doc.Load(Program.LoadedGenomePath);
                    NeatGenome readCPPN = XmlNeatGenomeReaderStatic.Read(doc);

                    var substrate = new MultiLayerSandwichSubstrate(Rows, Cols + 2, NeatExpParams.SubstrateLayers,
                                                                    NeatExpParams.AddBiasToSubstrate, HyperNEATParameters.substrateActivationFunction);

                    SharpNeatLib.NeatGenome.NeatGenome tempGenome = substrate.GenerateGenome(readCPPN.Decode(HyperNEATParameters.substrateActivationFunction));
                    m_loadedNetwork = tempGenome.Decode(HyperNEATParameters.substrateActivationFunction);

                    m_loadingBehaviour = true;
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.ToString());
                    m_loadingBehaviour = false;
                }
            }
        }
Example No. 2
		static public INetwork DecodeToConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
		{
		//----- Loop the neuronGenes. Create a Neuron for each one.
			// Store a table of neurons keyed by their id.
			Hashtable neuronTable = new Hashtable(g.NeuronGeneList.Count);
			NeuronList neuronList = new NeuronList();

			foreach(NeuronGene neuronGene in g.NeuronGeneList)
			{
				Neuron newNeuron = new Neuron(activationFn, neuronGene.NeuronType, neuronGene.InnovationId);
				neuronTable.Add(newNeuron.Id, newNeuron);
				neuronList.Add(newNeuron);
			}

		//----- Loop the connection genes. Create a Connection for each one and bind them to the relevant Neurons.
			foreach(ConnectionGene connectionGene in g.ConnectionGeneList)
			{
				Connection newConnection = new Connection(connectionGene.SourceNeuronId, connectionGene.TargetNeuronId, connectionGene.Weight);

				// Bind the connection to its source neuron.
				newConnection.SetSourceNeuron((Neuron)neuronTable[connectionGene.SourceNeuronId]);

				// Store the new connection against its target neuron.
				((Neuron)(neuronTable[connectionGene.TargetNeuronId])).ConnectionList.Add(newConnection);
			}

			return new ConcurrentNetwork(neuronList);
		}
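For context, the network returned above is driven with the same activate-and-read calls used later in this file (ClearSignals, SetInputSignals, MultipleSteps, GetOutputSignal). A minimal usage sketch follows; the containing decoder class name, the zeroed inputs, and the step count are assumptions for illustration only.

            // Minimal usage sketch, assuming `genome` and `activationFn` are already available.
            // The decoder class name, the zeroed inputs, and the 3 activation steps are illustrative.
            INetwork net = GenomeDecoder.DecodeToConcurrentNetwork(genome, activationFn);
            float[] inputs = new float[net.InputNeuronCount];  // all zeros for this sketch
            net.ClearSignals();                                // reset residual activation
            net.SetInputSignals(inputs);                       // load the input layer
            net.MultipleSteps(3);                              // propagate signals for a few steps
            double firstOutput = net.GetOutputSignal(0);       // read the first output neuron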
Example No. 3
        public static FastConcurrentMultiplicativeNetwork DecodeToFastConcurrentMultiplicativeNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
        {
            int outputNeuronCount = g.OutputNeuronCount;
            int neuronGeneCount = g.NeuronGeneList.Count;

            // Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
            // any reason to ever have more than one bias node - although there may be 0.
            int neuronGeneIdx=0;
            for(; neuronGeneIdx<neuronGeneCount; neuronGeneIdx++)
            {
                if(g.NeuronGeneList[neuronGeneIdx].NeuronType != NeuronType.Bias)
                    break;
            }
            int biasNodeCount = neuronGeneIdx;
            int inputNeuronCount = g.InputNeuronCount;

            // ConnectionGenes point to a neuron ID. We need to map this ID to a 0 based index for
            // efficiency. To do this we build a table of indexes (ints) keyed on neuron ID.
            // TODO: An alternative here would be to forgo the building of a table and do a binary
            // search directly on the NeuronGeneList - probably a good idea to use a heuristic based upon
            // neuroncount*connectioncount that decides on which technique to use. Small networks will
            // likely be faster to decode using the binary search.

            // We can partly achieve the above optimization by using a HybridDictionary instead of a Hashtable,
            // although creating the table is still somewhat expensive.
            HybridDictionary neuronIndexTable = new HybridDictionary(neuronGeneCount);
            for(int i=0; i<neuronGeneCount; i++)
                neuronIndexTable.Add(g.NeuronGeneList[i].InnovationId, i);

            // Count how many of the connections are actually enabled. TODO: make faster - store disable count?
            int connectionGeneCount = g.ConnectionGeneList.Count;
            int connectionCount=connectionGeneCount;
            //			for(int i=0; i<connectionGeneCount; i++)
            //			{
            //				if(g.ConnectionGeneList[i].Enabled)
            //					connectionCount++;
            //			}

            // Now we can build the connection array(s).
            FloatFastConnection[] connectionArray = new FloatFastConnection[connectionCount];
            for(int connectionGeneIdx=0; connectionGeneIdx<connectionCount; connectionGeneIdx++)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionGeneIdx];
                connectionArray[connectionGeneIdx].sourceNeuronIdx = (int)neuronIndexTable[connectionGene.SourceNeuronId];
                connectionArray[connectionGeneIdx].targetNeuronIdx = (int)neuronIndexTable[connectionGene.TargetNeuronId];
                connectionArray[connectionGeneIdx].weight = (float)connectionGene.Weight;
            }

            // Now sort the connection array on sourceNeuronIdx, secondary sort on targetNeuronIdx.
            // TODO: custom sort routine to prevent boxing/unboxing required by Array.Sort(ValueType[])
            //Array.Sort(connectionArray, fastConnectionComparer);
            QuickSortFastConnections(0, connectionArray.Length-1);

            return new FastConcurrentMultiplicativeNetwork(
                biasNodeCount, inputNeuronCount,
                outputNeuronCount, neuronGeneCount,
                connectionArray, activationFn);
        }
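The TODO above suggests a binary search over the NeuronGeneList as an alternative to building the index table. A minimal sketch of that alternative, assuming the list is sorted ascending by InnovationId; the helper below is hypothetical and not part of the library.

        // Hypothetical helper illustrating the binary-search alternative from the TODO above.
        // Assumes neuronGeneList is sorted ascending by InnovationId; returns the 0-based index.
        static int FindNeuronIndexById(NeuronGeneList neuronGeneList, uint innovationId)
        {
            int lo = 0, hi = neuronGeneList.Count - 1;
            while (lo <= hi)
            {
                int mid = lo + ((hi - lo) >> 1);
                uint midId = neuronGeneList[mid].InnovationId;
                if (midId == innovationId) return mid;
                if (midId < innovationId) lo = mid + 1;
                else hi = mid - 1;
            }
            return -1; // not found
        }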
Example No. 4
		//if genome x dominates y, increment y's dominated count, add y to x's dominated list
		public void update_domination(NeatGenome.NeatGenome x, NeatGenome.NeatGenome y,RankInformation r1,RankInformation r2)
		{
			if(dominates(x,y)) {
				r1.dominates.Add(r2);
				r2.domination_count++;
			}
		}
        private void EvolutionaryThread()
        {
            m_exp = CreateExperiment();
            var idgen = new IdGenerator();

            m_evoAlg = new EvolutionAlgorithm(
                new Population(idgen,
                               GenomeFactory.CreateGenomeList(m_exp.DefaultNeatParameters, idgen,
                                                              m_exp.InputNeuronCount, m_exp.OutputNeuronCount,
                                                              m_exp.DefaultNeatParameters.pInitialPopulationInterconnections,
                                                              NeatExpParams.PopulationSize)),
                m_exp.PopulationEvaluator, m_exp.DefaultNeatParameters);

            while (!m_shouldQuit)
            {
                Console.WriteLine("::::: Performing one generation");
                Console.WriteLine();

                m_evoAlg.PerformOneGeneration();

                if (NeatExpParams.SaveFitnessGrowth)
                {
                    m_eaLogger.WriteLine(String.Format("{0,-10} {1,-20} {2,-20} {3,-20}",
                                                       m_evoAlg.Generation,
                                                       m_evoAlg.BestGenome.Fitness,
                                                       m_evoAlg.Population.MeanFitness, m_evoAlg.Population.AvgComplexity));
                }

                m_curBestGenome = m_evoAlg.BestGenome as NeatGenome;
                if (m_evoAlg.BestGenome.Fitness > m_overalBestFitness)
                {
                    m_overalBestFitness = m_evoAlg.BestGenome.Fitness;
                    m_overalBestGenome  = m_curBestGenome;

                    if (NeatExpParams.SaveEachGenerationChampionCPPN)
                    {
                        try
                        {
                            var doc = new XmlDocument();
                            XmlGenomeWriterStatic.Write(doc, (NeatGenome)m_evoAlg.BestGenome);
                            var oFileInfo = new FileInfo(Path.Combine(
                                                             NeatExpParams.EALogDir, String.Format("BestIndividual-{0}-{1}.xml", MyUnum, m_evoAlg.Generation.ToString())));
                            doc.Save(oFileInfo.FullName);
                        }
                        catch
                        {
                            // Ignore failures while saving the champion genome; evolution continues regardless.
                        }
                    }
                }

                if (EAUpdate != null)
                {
                    EAUpdate.Invoke(this, EventArgs.Empty);
                }
            }
        }
Example No. 6
 public void initializeEvolution(int populationSize, NeatGenome seedGenome)
 {
     if (seedGenome == null)
     {
         initializeEvolution(populationSize);
         return;
     }
     logOutput = new StreamWriter(outputFolder + "logfile.txt");
     IdGenerator idgen = new IdGeneratorFactory().CreateIdGenerator(seedGenome);
     ea = new EvolutionAlgorithm(new Population(idgen, GenomeFactory.CreateGenomeList(seedGenome, populationSize, experiment.DefaultNeatParameters, idgen)), experiment.PopulationEvaluator, experiment.DefaultNeatParameters);
 }
        public GenomeVisualizerForm(NeatGenome genome)
        {
            InitializeComponent();
            updatePicture = true;
            _currentGenome = genome;
            _currentSelectedConnectionIndex = -1;
            //currentSelectedIndex = null;

            ShowGenomeNetwork(genome);
            ShowGenomeConnections(genome);

            ShowHyperCubeSlide(genome);
        }
        private void ShowHyperCubeSlide(NeatGenome genome)
        {
            float x = (float)numericUpDownX.Value;
            float y = (float)numericUpDownY.Value;

            float starty = -1.0f;
            float startx;

            INetwork net = genome.Decode(null);
            float[] coordinates = new float[4];
            coordinates[0] = x;
            coordinates[1] = y;
            double output;

            if (pictureBox1 == null) return;

            Graphics g = pictureBox1.CreateGraphics();

            while (starty < 1.0f)
            {            
                startx = -1.0f;

                coordinates[3] = starty;
                while (startx < 1.0f)
                {
                    coordinates[2] = startx;
                    net.ClearSignals();
                    net.SetInputSignals(coordinates);
                    net.MultipleSteps(3);
                    output = net.GetOutputSignal(0);
                    //HyberCubeSlidePanel.
                    if (output < 0.0f)
                    {
                        brush.Color = Color.FromArgb((int)(output* -255.0f), 0, 0);
                    }
                    else
                    {
                        brush.Color = Color.FromArgb(0,0,(int)(output * 255.0f));
                    }

                    g.FillRectangle(brush, new Rectangle((int)(startx * 100.0f)+100, 100-(int)(starty * 100.0f), 1, 1));
                    
                    //Show origin
                    startx += 0.01f;
                }
                starty += 0.01f;
            }
            brush.Color = Color.Green;
            g.FillEllipse(brush, new Rectangle((int)(x * 100.0f) +100, 100-(int)(y * 100.0f), 5, 5));
        }
Example No. 9
        public NeatGenome.NeatGenome generatePerceptronPattern(INetwork network, bool distance)
        {
            ConnectionGeneList connections = new ConnectionGeneList((int)(inputCount * outputCount));

            double[] inputs;
            if (distance)
            {
                inputs = new double[5];
            }
            else
            {
                inputs = new double[4];
            }
            //for this particular config, these inputs will never change so just set them now
            inputs[1] = 1;
            inputs[3] = -1;
            uint   counter = 0;
            double output;
            double x1 = -1, x2 = -1;

            double inputDelta  = (2.0 / (inputCount - 1));
            double outputDelta = (2.0 / (outputCount - 1));

            for (uint nodeFrom = 0; nodeFrom < inputCount; nodeFrom++, x1 += inputDelta)
            {
                inputs[0] = x1;
                x2        = -1;
                for (uint nodeTo = 0; nodeTo < outputCount; nodeTo++, x2 += outputDelta)
                {
                    inputs[2] = x2;
                    if (distance)
                    {
                        inputs[4] = ((Math.Sqrt(Math.Pow(inputs[0] - inputs[2], 2) + Math.Pow(inputs[1] - inputs[3], 2)) / (2 * sqrt2)));
                    }
                    network.ClearSignals();
                    network.SetInputSignals(inputs);
                    //currently assuming a depth no greater than 5
                    network.MultipleSteps(5);
                    output = network.GetOutputSignal(0);
                    if (Math.Abs(output) > threshold)
                    {
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(counter++, nodeFrom, nodeTo + inputCount, weight));
                    }
                }
            }
            NeatGenome.NeatGenome g = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)inputCount, (int)outputCount);
            return(g);
        }
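The threshold test above rescales |output| from (threshold, 1] onto (0, weightRange], preserving the sign of the CPPN output. A small worked example with illustrative numbers only:

            // Illustrative values only: threshold = 0.2, weightRange = 3.0, CPPN output = -0.6.
            // Magnitude above threshold: 0.6 - 0.2 = 0.4; normalized: 0.4 / (1 - 0.2) = 0.5;
            // scaled and re-signed: 0.5 * 3.0 * -1 = -1.5.
            double threshold = 0.2, weightRange = 3.0, output = -0.6;
            float weight = (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
            // weight == -1.5f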
        public static void Write(XmlNode parentNode, NeatGenome.NeatGenome genome, IActivationFunction activationFn)
        {
            //----- Start writing. Create document root node.
            XmlElement xmlNetwork = XmlUtilities.AddElement(parentNode, "network");
            XmlUtilities.AddAttribute(xmlNetwork, "activation-fn-id", activationFn.FunctionId);

            //----- Write neurons.
            XmlElement xmlNeurons = XmlUtilities.AddElement(xmlNetwork, "neurons");
            foreach(NeuronGene neuronGene in genome.NeuronGeneList)
                WriteNeuron(xmlNeurons, neuronGene);

            //----- Write Connections.
            XmlElement xmlConnections = XmlUtilities.AddElement(xmlNetwork, "connections");
            foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
                WriteConnection(xmlConnections, connectionGene);
        }
Example No. 11
        /// <summary>
        /// Copy constructor.
        /// </summary>
        /// <param name="copyFrom"></param>
        public NeatGenome(NeatGenome copyFrom, uint genomeId)
        {
            this.genomeId = genomeId;

            // No need to loop the arrays to clone each element because NeuronGene and ConnectionGene are
            // value data types (structs).
            neuronGeneList = new NeuronGeneList(copyFrom.neuronGeneList);
            connectionGeneList = new ConnectionGeneList(copyFrom.connectionGeneList);

            inputNeuronCount = copyFrom.inputNeuronCount;
            inputAndBiasNeuronCount = copyFrom.inputNeuronCount+1;
            outputNeuronCount = copyFrom.outputNeuronCount;
            inputBiasOutputNeuronCount = copyFrom.inputBiasOutputNeuronCount;
            inputBiasOutputNeuronCountMinus2 = copyFrom.inputBiasOutputNeuronCountMinus2;

            Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
        }
Example No. 12
		//function to check whether genome x dominates genome y, usually defined as being no worse on all
		//objectives and better on at least one
		public bool dominates(NeatGenome.NeatGenome x, NeatGenome.NeatGenome y) {
			bool better=false;
			double[] objx = x.objectives, objy = y.objectives;
			int sz = objx.Length;
			//if x is ever worse than y, it cannot dominate y
			//also check if x is better on at least one
			for(int i=0;i<sz-1;i++) {
				if(objx[i]<objy[i]) return false;
				if(objx[i]>objy[i]) better=true;
			}
			
			//genomic novelty check, disabled for now
			double thresh=0.1;
			if((objx[sz-1]+thresh)<(objy[sz-1])) return false;
			if((objx[sz-1]>(objy[sz-1]+thresh))) better=true;
			
			return better;
		}
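A short worked example of the comparison above, with illustrative objective vectors where the last entry is the genomic-novelty score handled by the threshold test:

		// Illustrative walk-through of dominates(x, y):
		double[] objx = { 3.0, 5.0, 0.30 };
		double[] objy = { 3.0, 4.0, 0.25 };
		// First sz-1 objectives: x is never worse (3.0 >= 3.0, 5.0 >= 4.0) and strictly
		// better on the second, so better == true.
		// Novelty check with thresh = 0.1: 0.30 + 0.1 = 0.40 is not below 0.25, so x is not
		// rejected; 0.30 is not above 0.25 + 0.1 = 0.35, so the novelty term adds nothing.
		// Result: dominates(x, y) returns true.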
Example No. 13
		/// <summary>
		/// Create an IdGenerator by interrogating the provided genome.
		/// </summary>
		/// <param name="genome"></param>
		/// <returns></returns>
		public IdGenerator CreateIdGenerator(NeatGenome genome)
		{
			uint maxGenomeId=0;
			uint maxInnovationId=0;

			// First pass: Determine the current maximum genomeId and innovationId.
			if(genome.GenomeId > maxGenomeId)
				maxGenomeId = genome.GenomeId;

			// Neuron IDs actually come from the innovation ID generator, so although they
			// aren't used as historical markers we should count them as innovation IDs here.
			foreach(NeuronGene neuronGene in genome.NeuronGeneList)
			{
				if(neuronGene.InnovationId > maxInnovationId)
					maxInnovationId = neuronGene.InnovationId;
			}

			foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
			{
				if(connectionGene.InnovationId > maxInnovationId)
					maxInnovationId = connectionGene.InnovationId;
			}

			if(maxGenomeId==uint.MaxValue)
			{	 //reset to zero.
				maxGenomeId=0;
			}
			else
			{	// Increment to next available ID.
				maxGenomeId++;
			}

			if(maxInnovationId==uint.MaxValue)
			{	 //reset to zero.
				maxInnovationId=0;
			}
			else
			{	// Increment to next available ID.
				maxInnovationId++;
			}

			// Create an IdGenerator using the discovered maximum IDs.						
			return new IdGenerator(maxGenomeId, maxInnovationId);
		}
        public static void Write(XmlNode parentNode, NeatGenome genome)
        {
            //----- Start writing. Create document root node.
            XmlElement xmlGenome = XmlUtilities.AddElement(parentNode, "genome");
            XmlUtilities.AddAttribute(xmlGenome, "id", genome.GenomeId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "species-id", genome.SpeciesId.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "age", genome.GenomeAge.ToString());
            XmlUtilities.AddAttribute(xmlGenome, "fitness", genome.Fitness.ToString("0.00"));

            //----- Write neurons.
            XmlElement xmlNeurons = XmlUtilities.AddElement(xmlGenome, "neurons");
            foreach(NeuronGene neuronGene in genome.NeuronGeneList)
                WriteNeuron(xmlNeurons, neuronGene);

            //----- Write Connections.
            XmlElement xmlConnections = XmlUtilities.AddElement(xmlGenome, "connections");
            foreach(ConnectionGene connectionGene in genome.ConnectionGeneList)
                WriteConnectionGene(xmlConnections, connectionGene);
        }
Example No. 15
		static public NetworkModel DecodeToNetworkModel(NeatGenome.NeatGenome g)
		{
			ModelNeuronList masterNeuronList = new ModelNeuronList();

			// loop all neurons and build a table keyed on id.
			HybridDictionary neuronTable = new HybridDictionary(g.NeuronGeneList.Count);
			foreach(NeuronGene neuronGene in g.NeuronGeneList)
			{
				ModelNeuron modelNeuron = new ModelNeuron(neuronGene.NeuronType, neuronGene.InnovationId,neuronGene.ActivationFunction);
				neuronTable.Add(modelNeuron.Id, modelNeuron);
				masterNeuronList.Add(modelNeuron);
			}

			// Loop through all of the connections.
			// Now we have a neuron table keyed on id we can attach the connections
			// to their source and target neurons.
			foreach(ConnectionGene connectionGene in g.ConnectionGeneList)
			{
				ModelConnection modelConnection = new ModelConnection();
				modelConnection.Weight = connectionGene.Weight;
				modelConnection.SourceNeuron = (ModelNeuron)neuronTable[connectionGene.SourceNeuronId];
				modelConnection.TargetNeuron = (ModelNeuron)neuronTable[connectionGene.TargetNeuronId];

				modelConnection.SourceNeuron.OutConnectionList.Add(modelConnection);
				modelConnection.TargetNeuron.InConnectionList.Add(modelConnection);
			}

            //Sebastian. Build Model connections
            foreach (ModuleGene mg in g.ModuleGeneList)
            {
                foreach (uint sourceID in mg.InputIds)
                {
                    foreach (uint targetID in mg.OutputIds)
                    {
                        ModelConnection modelConnection = new ModelConnection();
                        modelConnection.Weight = 1.0; //TODO  connectionGene.Weight;
                        modelConnection.SourceNeuron = (ModelNeuron)neuronTable[sourceID];
                        modelConnection.TargetNeuron = (ModelNeuron)neuronTable[targetID];

                        modelConnection.SourceNeuron.OutConnectionList.Add(modelConnection);
                        modelConnection.TargetNeuron.InConnectionList.Add(modelConnection);
                    }
                }
            }

			return new NetworkModel(masterNeuronList);
		}
Example No. 16
        static public ModularNetwork DecodeToCTRNN(NeatGenome.NeatGenome g)
        {
            int inputCount = g.InputNeuronCount;
            int outputCount = g.OutputNeuronCount;
            int neuronCount = g.NeuronGeneList.Count;

            IActivationFunction[] activationFunctions = new IActivationFunction[neuronCount];
            float[] biasList = new float[neuronCount];
            float[] timeConst = new float[neuronCount];

            Dictionary<uint, int> neuronLookup = new Dictionary<uint, int>(neuronCount);

            // Create an array of the activation functions for each non-module node in the genome.
            // Start with a bias node if there is one in the genome.
            // The genome's neuron list is assumed to be ordered by type, with the bias node appearing first.
            int neuronGeneIndex = 0;
            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                if (g.NeuronGeneList[neuronGeneIndex].NeuronType != NeuronType.Bias)
                    break;
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
            }
            int biasCount = neuronGeneIndex;
            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
                biasList[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].Bias;
                timeConst[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].TimeConstant;
            }

            // Create an array of the activation functions, inputs, and outputs for each module in the genome.
            ModulePacket[] modules = new ModulePacket[g.ModuleGeneList.Count];
            for (int i = g.ModuleGeneList.Count - 1; i >= 0; i--)
            {
                modules[i].function = g.ModuleGeneList[i].Function;
                // Must translate input and output IDs to array locations.
                modules[i].inputLocations = new int[g.ModuleGeneList[i].InputIds.Count];
                for (int j = g.ModuleGeneList[i].InputIds.Count - 1; j >= 0; j--)
                {
                    modules[i].inputLocations[j] = neuronLookup[g.ModuleGeneList[i].InputIds[j]];
                }
                modules[i].outputLocations = new int[g.ModuleGeneList[i].OutputIds.Count];
                for (int j = g.ModuleGeneList[i].OutputIds.Count - 1; j >= 0; j--)
                {
                    modules[i].outputLocations[j] = neuronLookup[g.ModuleGeneList[i].OutputIds[j]];
                }
            }

            // ConnectionGenes point to a neuron's innovation ID. Translate this ID to the neuron's index in the neuron array. 
            FloatFastConnection[] connections = new FloatFastConnection[g.ConnectionGeneList.Count];
            for (int connectionGeneIndex = g.ConnectionGeneList.Count - 1; connectionGeneIndex >= 0; connectionGeneIndex--)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionGeneIndex];
                connections[connectionGeneIndex].sourceNeuronIdx = neuronLookup[connectionGene.SourceNeuronId];
                connections[connectionGeneIndex].targetNeuronIdx = neuronLookup[connectionGene.TargetNeuronId];
                connections[connectionGeneIndex].weight = (float)connectionGene.Weight;

            }

            CTRNN mn = new CTRNN(biasCount, inputCount, outputCount, neuronCount, connections, biasList, activationFunctions, modules,timeConst);

            mn.genome = g;
            return mn;
        }
        // NOTE: Multi-Plane Substrates ARE supported by this method!
        private NeatGenome.NeatGenome generateHiveBrainGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet,bool ct)
        {
            //bool relativeCoordinate = false;
            bool oneWay = false;
            bool homogeneous = false;
            Dictionary<String, float> weights = new Dictionary<String, float>();
            float timeConstantMin = 0.1f;
            float timeConstantMax = 2.0f;

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount))); // TODO: Perhaps get an exact count of connections in the constructor and use that value here?
            float[] coordinates = new float[5]; //JUSTIN: Used to be 6 coordinates, zstack was duplicated for relativeCoordinate hyjinx. fixed it. // Inputs to the CPPN: [srcX, srcY, tgX, tgY, zstack]
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            // CPPN Outputs: [ Weights ] [ Biases ]
            // When using multi-plane substrates, there will be multiple Weight and Bias outputs.
            // There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
            // There is a Bias output for every plane
            // Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
            int numPlanes = planes.Count;
            int numPlaneConnections = planesConnected.Count;
            int computedIndex;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                //coordinates[4] = homogeneous ? 0 : stackCoordinate;//-1 ? -1 : 0;//0;//stackCoordinate;
                //coordinates[5] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        /*if (!relativeCoordinate)
                            coordinates[5] = stackCoordinate;
                        else //USE RELATIVE
                            coordinates[5] = 0;//*/

                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //-----------------Get the bias of the source node
                           /* switch (ng.GroupType)
                            {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                            }
                            coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);

                            neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                            if (ct)
                            {
                                neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
                                System.Diagnostics.Debug.Assert(neurons[(int)sourceID].TimeConstant > 0);
                            }*/
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //-----------------Get the bias of the target node
                                   coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                                   //String s = arrayToString(coordinates);
                                   //if (weights.ContainsKey(s))
                                   //    neurons[(int)targetID].Bias = weights[s];
                                   //else
                                   {
                                       network.ClearSignals();
                                       network.SetInputSignals(coordinates);
                                       network.RecursiveActivation();//network.MultipleSteps(iterations);
                                       computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                       //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                       neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                       //weights.Add(s,neurons[(int)targetID].Bias);
                                   }
                                   if (ct)
                                   {
                                       neurons[(int)targetID].TimeConstant = timeConstantMin + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * (timeConstantMax - timeConstantMin));
                                       System.Diagnostics.Debug.Assert(neurons[(int)targetID].TimeConstant > 0);
                                   }
                                //----------------------------

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;
                                //Console.WriteLine(arrayToString(coordinates));
                                
                                //if(weights.ContainsKey(s))
                                //    output = weights[s];
                                //else
                                {
                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    network.RecursiveActivation();//network.MultipleSteps(iterations);
                                    computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                                    //output = network.GetOutputSignal(0);
                                    output = network.GetOutputSignal(computedIndex);
                                    //weights.Add(s, output);
                                }
                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                                    }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }

                    foreach (uint connectedTo in ng.HiveConnectedTo)
                    {
                        bool wrapAround = true;

                        for (uint agentConnect = 0; agentConnect < stackCoordinates.Count; agentConnect++)
                        {
                            //Make sure we're not making a recurrent connection on the same agent
                            //if (agentConnect == agent)
                            //    continue;
                           // else if ((agent == stackCoordinates.Count - 1 && agentConnect == 0) || (agent == 0 && agentConnect == stackCoordinates.Count - 1))
                           //     ;//agentConnect = 0;
                            if (agent != 0 && agent != stackCoordinates.Count - 1)
                                continue;

                            //if (agent == 1)
                            //    continue;

                            //if (agentConnect != 0 )
                            //    continue;

                            //Limits connections to only neighbors.  Good?
                            //if (!((agent == 0 || agentConnect >= agent - 1) && agentConnect <= agent + 1))
                            //    continue;
                            //if (agentConnect > agent + 1 || agentConnect < agent - 1)
                            //    continue;

                            if (oneWay)
                            {
                                //ONE-WAY
                                if (agentConnect > agent + 1 || agentConnect < agent)
                                    continue;
                            }

                            /*if (!relativeCoordinate)
                                //USE THE Z COORDINATE
                                coordinates[5] = stackCoordinates[(int)agentConnect];
                            else
                                //USE THE RELATIVE COORDINATE
                                coordinates[5] = agentConnect > agent ? 1 : -1;
                            //*/
                            //WRAP AROUND
                            /*if (agent == stackCoordinates.Count - 1 && agentConnect == 0)
                                coordinates[5] = 1;
                            else if (agent == 0 && agentConnect == stackCoordinates.Count - 1)
                                coordinates[5] = -1;
                             */

                            connectedNG = getNeuronGroup(connectedTo);

                            sourceCount = 0;
                            foreach (PointF source in ng.NeuronPositions)
                            {

                                //-----------------Get the bias of the source node
                               /* switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }
                                coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);

                                neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                if (ct)
                                {
                                    neurons[(int)sourceID].TimeConstant = 0.01f + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * .05f);
                                    System.Diagnostics.Debug.Assert(neurons[(int)sourceID].TimeConstant > 0);
                                }*/
                                //----------------------------

                                targetCout = 0;
                                foreach (PointF target in connectedNG.NeuronPositions)
                                {
                                    /*if ((source.X != target.X))
                                    {
                                        targetCout++;
                                        continue;
                                    }*/
                                    if (/*source.X!= target.X ||*/ target.X != coordinates[4])// || source.X!= coordinates[4])
                                    {
                                        targetCout++;
                                        continue;
                                    }
                                   /* if (agent != 0 && agent != stackCoordinates.Count - 1)
                                    { 
                                        if(agentConnect != 0 && agentConnect != stackCoordinates.Count - 1)
                                        {
                                            targetCout++;
                                            continue;
                                        }
                                    }*/
                                    switch (ng.GroupType)
                                    {
                                        case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                        case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                        case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                    }

                                    switch (connectedNG.GroupType)
                                    {
                                        case 0: targetID = (agentConnect * InputCount) + connectedNG.GlobalID + targetCout; break;
                                        case 1: targetID = totalInputCount + (agentConnect * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                        case 2: targetID = totalInputCount + totalOutputCount + (agentConnect * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                    }

                                    //-----------------Get the bias of the target node
                                    coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                                    //String s = arrayToString(coordinates);
                                    //if (weights.ContainsKey(s))
                                    //    neurons[(int)targetID].Bias = weights[s];
                                    //else
                                    {
                                        network.ClearSignals();
                                        network.SetInputSignals(coordinates);
                                        network.RecursiveActivation();//network.MultipleSteps(iterations);
                                        computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                        //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                        neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                       // weights.Add(s, neurons[(int)targetID].Bias);
                                    }
                                    if (ct)
                                    {
                                        neurons[(int)targetID].TimeConstant = timeConstantMin + ((((float)network.GetOutputSignal(2) + 1.0f) / 2.0f) * (timeConstantMax - timeConstantMin));
                                        System.Diagnostics.Debug.Assert(neurons[(int)targetID].TimeConstant > 0);
                                    }
                                    //----------------------------

                                    coordinates[0] = source.X;
                                    coordinates[1] = source.Y;
                                    coordinates[2] = target.X;
                                    coordinates[3] = target.Y;
                                    //s = arrayToString(coordinates);
                                    //if (weights.ContainsKey(s))
                                    //    output = weights[s];
                                    //else
                                    {
                                        network.ClearSignals();
                                        network.SetInputSignals(coordinates);
                                        network.RecursiveActivation();//network.MultipleSteps(iterations);
                                        computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                                        //output = network.GetOutputSignal(0);
                                        output = network.GetOutputSignal(computedIndex);
                                      //  weights.Add(s, output);
                                    }

                                    double leo = 0.0;

                                    if (adaptiveNetwork)
                                    {
                                        A = network.GetOutputSignal(2);
                                        B = network.GetOutputSignal(3);
                                        C = network.GetOutputSignal(4);
                                        D = network.GetOutputSignal(5);
                                        learningRate = network.GetOutputSignal(6);
                                    }

                                    if (modulatoryNet)
                                    {
                                        modConnection = network.GetOutputSignal(7);
                                    }
                                    else
                                    {
                                        modConnection = 0.0f;
                                    }

                                    if (useLeo)
                                    {
                                        threshold = 0.0;
                                        leo = network.GetOutputSignal(2);
                                    }

                                    if (!useLeo || leo > 0.0)
                                        if (Math.Abs(output) > threshold)
                                        {
                                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                            //if (adaptiveNetwork)
                                            //{
                                            //    //If adaptive network set weight to small value
                                            //    weight = 0.1f;
                                            //}
                                            connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, true));
                                        }
                                    //else
                                    //{
                                    //    Console.WriteLine("Not connected");
                                    //}
                                    targetCout++;
                                }
                                sourceCount++;
                            }
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
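The CPPN output layout described in the comment block above puts one weight output per connected plane pair first, followed by one bias output per plane (index numPlaneConnections + planes.IndexOf(plane)). A minimal sketch of that indexing convention follows, assuming the plane pairs are stored as two parallel arrays in the same order as the weight outputs; the real indexOfPlaneConnection helper is defined elsewhere in the substrate class and may differ in detail.

        // Hypothetical sketch of the weight-output lookup used above; parallel arrays of
        // source/target plane indices stand in for whatever structure planesConnected really is.
        static int SketchIndexOfPlaneConnection(int[] connectionSourcePlane, int[] connectionTargetPlane,
                                                int sourcePlane, int targetPlane)
        {
            for (int i = 0; i < connectionSourcePlane.Length; i++)
                if (connectionSourcePlane[i] == sourcePlane && connectionTargetPlane[i] == targetPlane)
                    return i;   // index of the CPPN weight output for this plane-to-plane connection
            return -1;          // the two planes are not connected
        }
        // Bias outputs follow the weight outputs: biasIndex = numPlaneConnections + planes.IndexOf(targetPlane).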
        void ShowGenomeConnections(NeatGenome genome)
        {
            int savedIndex = listBoxConnections.SelectedIndex;
            listBoxConnections.Items.Clear();

            /* show information about each connection in the listbox */
            foreach (ConnectionGene connection in genome.ConnectionGeneList)
            {
                NeuronGene sourceNeuron = genome.NeuronGeneList.GetNeuronById(connection.SourceNeuronId);
                NeuronGene destinationNeuron = genome.NeuronGeneList.GetNeuronById(connection.TargetNeuronId);
                string info = string.Format("{0}({1}) --> {2}({3}); weight: {4:F3}", connection.SourceNeuronId, sourceNeuron.NeuronType, connection.TargetNeuronId, destinationNeuron.NeuronType, connection.Weight);
                /* add the info text and the connection object itself to the list item */
                ListItem item = new ListItem("", info, connection);
                listBoxConnections.Items.Add(item);
            }

            /* try to restore the saved selection index and refresh the drawn network */
            listBoxConnections.SelectedIndex = listBoxConnections.Items.Count > savedIndex ? savedIndex : -1;
           // ShowNetworkFromGenome(genome);
        }
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
            float[] coordinates = new float[4];
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {

                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                                case 0: targetID = connectedNG.GlobalID + targetCout; break;
                                case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
                                case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //calculate bias of target node
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID] = true;
                            }

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            //network.MultipleSteps(iterations);
                            output = network.GetOutputSignal(0);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return gn;
        }
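        // Editor's sketch (not in the original source): the expression rule used above maps a CPPN
        // output in (-1, 1) to a substrate connection weight, expressing a connection only when the
        // output magnitude exceeds the threshold. A minimal stand-alone helper illustrating that
        // mapping might look like this (the name and nullable return type are hypothetical):
        private static float? CppnOutputToWeight(float output, double threshold, double weightRange)
        {
            // Below the threshold, no connection is expressed at all.
            if (Math.Abs(output) <= threshold)
                return null;

            // Rescale the remaining magnitude into (0, weightRange] and restore the sign,
            // exactly as done inline in the generation methods above.
            return (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
        }
        // Example: threshold = 0.2, weightRange = 3.0, output = 0.6  ->  weight = 1.5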
Ejemplo n.º 20
        public void loadGenome(String filename)
        {
            XmlDocument doc = new XmlDocument();
            doc.Load(filename);
            genome = XmlNeatGenomeReaderStatic.Read(doc);
            bestGenomeSoFar = genome;
            genomeFilename = filename;
            // TODO: maybe call initialize() here.
        }
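        // Hypothetical usage sketch (not in the original source); the file name is illustrative and the
        // decode call mirrors how loaded genomes are decoded elsewhere in this code base:
        //
        //   this.loadGenome("seedCPPN.xml");   // populates the genome field above
        //   INetwork cppn = genome.Decode(HyperNEATParameters.substrateActivationFunction);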
        // NOTE: multi-plane substrates may be supported by this method, but this is not verified.
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));
            float[] coordinates = new float[4]; // JUSTIN: back to [4]; the extra CPPN inputs below remain commented out
            float output;
            uint connectionCounter = 0;
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            // CPPN Outputs: [ Weights ] [ Biases ]
            // When using multi-plane substrates, there will be multiple Weight and Bias outputs.
            // There is a Weight output for every plane-to-plane connection (including a plane connected to itself, as in regular substrates)
            // There is a Bias output for every plane
            // Since "regular substrates" only have 1 plane, they only have 1 Weight and 1 Bias output. MP substrates have more. :)
            int numPlanes = planes.Count;
            int numPlaneConnections = planesConnected.Count;
            int computedIndex;

            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {

                        //-----------------Get the bias of the source node
                        /*switch (ng.GroupType)
                        {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                        }
                        coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                        network.ClearSignals();
                        network.SetInputSignals(coordinates);
                        network.RecursiveActivation();//network.MultipleSteps(iterations);

                        neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                        //*///----------------------------

                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break;  //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                                case 0: targetID = connectedNG.GlobalID + targetCout; break;
                                case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;
                                case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //-----------------Get the bias of the target node
                            coordinates[0] = target.X; coordinates[1] = target.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;
                            //coordinates[4] = 0.0f; coordinates[5] = 0.0f; //JUSTIN: REMOVE THIS!!!
                            //String s = arrayToString(coordinates);
                            //if (weights.ContainsKey(s))
                            //    neurons[(int)targetID].Bias = weights[s];
                            //else
                            {
                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);
                                computedIndex = numPlaneConnections + planes.IndexOf(connectedNG.Plane);
                                //neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(computedIndex) * weightRange);
                                //weights.Add(s,neurons[(int)targetID].Bias);
                            }
                            //----------------------------

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;
                            //coordinates[4] = source.X - target.X; coordinates[5] = source.Y - target.Y; //JUSTIN: REMOVE THIS!!!

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);
                            computedIndex = indexOfPlaneConnection(ng.Plane, connectedNG.Plane);
                            //output = network.GetOutputSignal(0);
                            output = network.GetOutputSignal(computedIndex);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return gn;
        }
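        // Editor's sketch (not in the original source) of the multi-plane CPPN output layout used in the
        // method above: outputs [0 .. numPlaneConnections-1] are connection weights, one per plane-to-plane
        // connection, and outputs [numPlaneConnections .. numPlaneConnections+numPlanes-1] are per-plane
        // biases. Expressed as hypothetical helpers:
        //
        //   int WeightOutputIndex(int planeConnectionIndex) { return planeConnectionIndex; }
        //   int BiasOutputIndex(int planeIndex)             { return numPlaneConnections + planeIndex; }
        //
        // which matches indexOfPlaneConnection(ng.Plane, connectedNG.Plane) for weights and
        // numPlaneConnections + planes.IndexOf(connectedNG.Plane) for biases in the code above.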
        public NeatGenome.NeatGenome generateGenomeStackSituationalPolicy(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, float signal)
        {
            // Schrum: For debugging
            //Console.WriteLine("generateGenomeStackSituationalPolicy:signal=" + signal);
            //Console.WriteLine("CPPN inputs = " + network.InputNeuronCount);

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            // Schrum: Too many inputs: Only store those that are needed
            //float[] coordinates = new float[5 + 1]; // <-- Schrum: bit sloppy: frequently results in unused CPPN inputs. Should make more precise
            float[] coordinates = new float[network.InputNeuronCount]; // Schrum: CPPN tracks how many inputs it needs
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            // Schrum: If we are inside this function, then we either have a heterogeneous team
            //         or a single agent (not sure why that ended up being the case; odd use of homogeneousTeam).
            //         Therefore, numberOfAgents tells us whether we need to save space for a Z-coordinate,
            //         and whether we are expecting a Situation input.
            if (numberOfAgents == 1 && coordinates.Length > 4)
            {
                coordinates[4] = signal; // No Z coord, but save situation
            }
            else if (coordinates.Length > 5)
            {
                coordinates[5] = signal; // Both Z coord and situation
            }
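            // Editor's note (illustrative, matching the branches above): the CPPN input vector is laid out
            // as [0]=sourceX, [1]=sourceY, [2]=targetX, [3]=targetY; the stack (z) coordinate, used when
            // there is more than one agent, goes in [4], and the situation signal takes the next free slot:
            // index 4 for a single agent, index 5 for a multi-agent team.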
            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                // Schrum: Only include Z-coord as input if there are multiple team members
                if (numberOfAgents > 1)
                {
                    coordinates[4] = stackCoordinate; // Schrum: z-coord will always be at index 4
                }
                // Schrum: Debug
                //Console.WriteLine("CPPN inputs (first 4 blank): " + string.Join(",", coordinates));


                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;


                        foreach (PointF source in ng.NeuronPositions)
                        {
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;

                                //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;

                                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;      //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //--- bias
                                //-----------------Get the bias of the target node
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }
                                //--bias



                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                // Schrum: Debug
                                //Console.WriteLine("CPPN inputs: " + string.Join(",", coordinates));

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                // Schrum: Observation: It seems impossible to use both LEO and adaptive networks because of these hardcoded magic numbers
                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                // Schrum: Observation: In long run, might be desirable to use LEO, but incompatible with special preference neuron output
                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                // Schrum: This is a horrible hack, but it gets the job done for now.
                                // The reason this works is that it makes the following assumptions that could easily be broken in the future:
                                // 1) It is assumed that the only reason a CPPN would have 3 outputs per policy is if the third is for preference links
                                // 2) It is assumed that in a substrate with a preference neuron, the y-coord will always be 0.8, and no other neuron will have
                                //    that y-coord.
                                //Console.WriteLine("output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                //Console.WriteLine("network.OutputsPerPolicy == 3" + (network.OutputsPerPolicy == 3));
                                //Console.WriteLine("target.Y == 0.8" + (target.Y == 0.8f));
                                if (network.OutputsPerPolicy == 3 && target.Y == 0.8f)
                                {
                                    // The output from the link for the preference neuron replaces the standard output.
                                    // Because the link weight is defined by a totally different CPPN output, the preference
                                    // neuron is more free to behave very differently.
                                    output = network.GetOutputSignal(2);
                                    //Console.WriteLine("Preference output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight =
                                            (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) *
                                                    weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network, set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new
                                                        ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref
                                                                       coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: Debugging
            // Looking at the control networks has revealed that the order of details in the substrate
            // description is important. The layer with the preference neuron has to be defined last
            // if it is to be the final neuron in the linearly organized output layer.
            //XmlDocument doc = new XmlDocument();
            //SharpNeatLib.NeatGenome.Xml.XmlGenomeWriterStatic.Write(doc, sng);
            //System.IO.FileInfo oFileInfo = new System.IO.FileInfo("temp.xml");
            //doc.Save(oFileInfo.FullName);

            return(sng);
        }
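        // Hypothetical usage sketch (not in the original source): the variable names, z coordinates and
        // signal value below are illustrative.
        //
        //   List<float> zCoords = new List<float> { -1.0f, 0.0f, 1.0f };   // a three-agent team
        //   NeatGenome.NeatGenome teamGenome = substrate.generateGenomeStackSituationalPolicy(
        //       cppn, zCoords, true /*normalizeWeights*/, false /*adaptiveNetwork*/, false /*modulatoryNet*/, 0.5f /*signal*/);
        //   INetwork controller = teamGenome.Decode(HyperNEATParameters.substrateActivationFunction);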
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            float[] coordinates = new float[5];
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            // Schrum: debugging

            /*
             * Console.WriteLine("generateMultiGenomeStack");
             * Console.WriteLine("numberOfAgents:" + numberOfAgents);
             * Console.WriteLine("totalOutputCount:" + totalOutputCount);
             * Console.WriteLine("totalInputCount:" + totalInputCount);
             */

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                       //Input

                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                    //Output

                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: debugging
            //Console.WriteLine("sng.InputNeuronCount:" + sng.InputNeuronCount);
            //Console.WriteLine("sng.OutputNeuronCount:" + sng.OutputNeuronCount);

            return(sng);
        }
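        // Editor's note (not in the original source): stackCoordinates are typically spread evenly over
        // [-1, 1], which is what the (otherwise unused) agentDelta above suggests. An illustrative way to
        // build them when numberOfAgents > 1:
        //
        //   List<float> zCoords = new List<float>();
        //   for (int i = 0; i < numberOfAgents; i++)
        //       zCoords.Add(-1.0f + i * (2.0f / (numberOfAgents - 1)));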
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            float[] coordinates = new float[4];
            float   output;
            uint    connectionCounter = 0;
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount;
            uint totalInputCount  = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];


            uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {
                        targetCout = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            switch (ng.GroupType)
                            {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                                      //Input

                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                    //Output

                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                            case 0: targetID = connectedNG.GlobalID + targetCout; break;

                            case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCout; break;

                            case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCout; break;
                            }

                            //calculate bias of target node
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID]    = true;
                            }

                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            //network.MultipleSteps(iterations);
                            output = network.GetOutputSignal(0);

                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            //else
                            //{
                            //    Console.WriteLine("Not connected");
                            //}
                            targetCout++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return(gn);
        }
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            List <PointF> hiddenNeuronPositions = new List <PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList();//(int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            List <PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List <PointF> inputNeuronPositions  = getNeuronGroupByType(0);


            EvolvableSubstrate se = new EvolvableSubstrate();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                                   HyperNEATParameters.initialRes,
                                   (float)HyperNEATParameters.varianceThreshold,
                                   (float)HyperNEATParameters.bandingThreshold,
                                   (int)HyperNEATParameters.ESIterations,
                                   (float)HyperNEATParameters.divisionThreshold,
                                   HyperNEATParameters.maximumRes,
                                   InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            float[] coordinates       = new float[5];
            uint    connectionCounter = (uint)connections.Count;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            bool[]      visited  = new bool[neurons.Count];
            List <uint> nodeList = new List <uint>();

            bool[] connectedToInput = new bool[neurons.Count];

            bool[] isOutput = new bool[neurons.Count];

            bool danglingConnection = true;

            while (danglingConnection)
            {
                bool[] hasIncomming = new bool[neurons.Count];

                foreach (ConnectionGene co in connections)
                {
                    //  if (co.SourceNeuronId != co.TargetNeuronId)
                    // {
                    hasIncomming[co.TargetNeuronId] = true;
                    // }
                }
                for (int i = 0; i < InputCount; i++)
                {
                    hasIncomming[i] = true;
                }

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    //  if (co.TargetNeuronId != co.SourceNeuronId)
                    //  {
                    if (co.TargetNeuronId != co.SourceNeuronId)  //neurons that only connect to themselves don't count
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                    //  }
                }

                //Keep  output neurons
                for (int i = 0; i < OutputCount; i++)
                {
                    hasOutgoing[i + InputCount] = true;
                }


                danglingConnection = false;
                //Check if there are still dangling connections
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncomming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return(!hasIncomming[m.SourceNeuronId]); });
                connections.RemoveAll(delegate(ConnectionGene m) { return(!hasOutgoing[m.TargetNeuronId]); });
            }
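            // Editor's note (not in the original source): the loop above iteratively prunes "dangling"
            // connections: those whose source neuron has no incoming connection (and is not an input) or
            // whose target neuron has no outgoing connection (and is not an output). Removing one such
            // connection can strand another, so the passes repeat until nothing more is removed.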

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));
            //     SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;

            return(gn);
        }
Ejemplo n.º 26
		static public FloatFastConcurrentNetwork DecodeToFloatFastConcurrentNetwork(NeatGenome.NeatGenome g, IActivationFunction activationFn)
		{			
			int outputNeuronCount = g.OutputNeuronCount;
			int neuronGeneCount = g.NeuronGeneList.Count;

			// Slightly inefficient - determine the number of bias nodes. Fortunately there is not actually
			// any reason to ever have more than one bias node - although there may be 0.

            activationFunctionArray = new IActivationFunction[neuronGeneCount];
            
            int neuronGeneIdx=0;
			for(; neuronGeneIdx<neuronGeneCount; neuronGeneIdx++)
			{
                activationFunctionArray[neuronGeneIdx] = g.NeuronGeneList[neuronGeneIdx].ActivationFunction;
				if(g.NeuronGeneList[neuronGeneIdx].NeuronType != NeuronType.Bias)
					break;
			}
			int biasNodeCount = neuronGeneIdx;
			int inputNeuronCount = g.InputNeuronCount;
            for (; neuronGeneIdx < neuronGeneCount; neuronGeneIdx++)
            {
                activationFunctionArray[neuronGeneIdx] = g.NeuronGeneList[neuronGeneIdx].ActivationFunction;
            }
			
			// ConnectionGenes point to a neuron ID. We need to map this ID to a 0 based index for
			// efficiency. 
			
			// Use a quick heuristic to determine which will be the fastest technique for mapping the connection end points
			// to neuron indexes. This heuristic is not 100% perfect but has been found to be very good in real-world
			// tests. Feel free to perform your own calculation and create a more intelligent heuristic!
			int connectionCount=g.ConnectionGeneList.Count;
			if(neuronGeneCount * connectionCount < 45000)
			{	
				fastConnectionArray = new FloatFastConnection[connectionCount];
				int connectionIdx=0;
				for(int connectionGeneIdx=0; connectionGeneIdx<connectionCount; connectionGeneIdx++)
				{
                    //fastConnectionArray[connectionIdx] = new FloatFastConnection();
                    //Note: the binary search assumes that neurons are ordered by their innovation Id.
					ConnectionGene connectionGene = g.ConnectionGeneList[connectionIdx];
					fastConnectionArray[connectionIdx].sourceNeuronIdx = (int)g.NeuronGeneList.BinarySearch(connectionGene.SourceNeuronId);
					fastConnectionArray[connectionIdx].targetNeuronIdx = (int)g.NeuronGeneList.BinarySearch(connectionGene.TargetNeuronId);

					System.Diagnostics.Debug.Assert(fastConnectionArray[connectionIdx].sourceNeuronIdx>=0 && fastConnectionArray[connectionIdx].targetNeuronIdx>=0, "invalid idx");

					fastConnectionArray[connectionIdx].weight = (float)connectionGene.Weight;

					connectionIdx++;
				}
			}
			else
			{
				// Build a table of indexes (ints) keyed on neuron ID. This approach is faster when dealing with large numbers 
				// of lookups.
				Hashtable neuronIndexTable = new Hashtable(neuronGeneCount);
				for(int i=0; i<neuronGeneCount; i++)
					neuronIndexTable.Add(g.NeuronGeneList[i].InnovationId, i);

				// Now we can build the connection array(s).
				//int connectionCount=g.ConnectionGeneList.Count;
				//FastConnection[] connectionArray = new FastConnection[connectionCount];
				fastConnectionArray = new FloatFastConnection[connectionCount];
				int connectionIdx=0;
				for(int connectionGeneIdx=0; connectionGeneIdx<connectionCount; connectionGeneIdx++)
				{
					ConnectionGene connectionGene = g.ConnectionGeneList[connectionIdx];
					fastConnectionArray[connectionIdx].sourceNeuronIdx = (int)neuronIndexTable[connectionGene.SourceNeuronId];
					fastConnectionArray[connectionIdx].targetNeuronIdx = (int)neuronIndexTable[connectionGene.TargetNeuronId];
					fastConnectionArray[connectionIdx].weight = (float)connectionGene.Weight;
					connectionIdx++;
				}
			}
			// Now sort the connection array on sourceNeuronIdx, secondary sort on targetNeuronIdx.
			// Using Array.Sort is 10 times slower than the hand-coded sorting routine. See notes on that routine for more 
			// information. Also note that in tests this sorting did not actually improve the speed of the network!
			// However, it may have a benefit for CPUs with small caches or when networks are very large, and since the new
			// sort takes up hardly any time for even large networks, it seems reasonable to leave in the sort.
			//Array.Sort(fastConnectionArray, fastConnectionComparer);
			//if(fastConnectionArray.Length>1)
			//	QuickSortFastConnections(0, fastConnectionArray.Length-1);
			
			return new FloatFastConcurrentNetwork(	biasNodeCount, inputNeuronCount, 
				outputNeuronCount, neuronGeneCount,
				fastConnectionArray, activationFunctionArray);
		}
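        // Hypothetical usage sketch (not in the original source): decode a genome and run the network.
        // The genome variable, input values and step count are illustrative.
        //
        //   FloatFastConcurrentNetwork net =
        //       DecodeToFloatFastConcurrentNetwork(genome, HyperNEATParameters.substrateActivationFunction);
        //   net.ClearSignals();
        //   net.SetInputSignals(new float[] { 0.1f, -0.3f, 0.7f });
        //   net.MultipleSteps(5);
        //   float firstOutput = net.GetOutputSignal(0);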
        // Saves a CPPN in DOT file format.
        // Assumes that the inputs are X1, Y1, X2, Y2, Z.
        public static void saveCPPNasDOT(SharpNeatLib.NeatGenome.NeatGenome genome, string filename)
        {
            StreamWriter SW = File.CreateText(filename);

            SW.WriteLine("digraph g { ");

            String activationType = "";

            foreach (NeuronGene neuron in genome.NeuronGeneList)
            {
                switch (neuron.NeuronType)
                {
                case NeuronType.Bias: SW.WriteLine("N0 [shape=box, label=Bias]"); break;

                case NeuronType.Input:

                    string str = "?";
                    switch (neuron.InnovationId)
                    {
                    case 1: str = "X1"; break;

                    case 2: str = "Y1"; break;

                    case 3: str = "X2"; break;

                    case 4: str = "Y2"; break;

                    case 5: str = "Z"; break;
                    }
                    SW.WriteLine("N" + neuron.InnovationId + "[shape=box label=" + str + "]");
                    break;

                case NeuronType.Output: SW.WriteLine("N" + neuron.InnovationId + "[shape=triangle]"); break;

                case NeuronType.Hidden:
                    if (neuron.ActivationFunction.FunctionDescription.Equals("bipolar steepend sigmoid"))
                    {
                        activationType = "S";
                    }
                    if (neuron.ActivationFunction.FunctionDescription.Equals("bimodal gaussian"))
                    {
                        activationType = "G";
                    }
                    if (neuron.ActivationFunction.FunctionDescription.Equals("Linear"))
                    {
                        activationType = "L";
                    }
                    if (neuron.ActivationFunction.FunctionDescription.Equals("Sin function with doubled period"))
                    {
                        activationType = "Si";
                    }
                    if (neuron.ActivationFunction.FunctionDescription.Equals("Returns the sign of the input"))
                    {
                        activationType = "Sign";
                    }

                    SW.WriteLine("N" + neuron.InnovationId + "[shape=circle, label=N" + neuron.InnovationId + "_" + activationType + ", fillcolor=gray]");
                    break;
                }
            }

            foreach (ConnectionGene gene in genome.ConnectionGeneList)
            {
                SW.Write("N" + gene.SourceNeuronId + " -> N" + gene.TargetNeuronId + " ");

                if (gene.Weight > 0)
                {
                    SW.WriteLine("[color=black] ");
                }
                else if (gene.Weight < 0)
                {
                    SW.WriteLine("[color=red] [arrowhead=inv]");
                }
            }

            //foreach (ModuleGene mg in genome.ModuleGeneList)
            //{
            //    foreach (uint sourceID in mg.InputIds)
            //    {
            //        foreach (uint targetID in mg.OutputIds)
            //        {
            //            SW.Write("N" + sourceID + " -> N" + targetID + " ");

            //            SW.WriteLine("[color=gray]");
            //        }
            //    }
            //}

            SW.WriteLine(" { rank=same; ");
            foreach (NeuronGene neuron in genome.NeuronGeneList)
            {
                if (neuron.NeuronType == NeuronType.Output)
                {
                    SW.WriteLine("N" + neuron.InnovationId);
                }
            }
            SW.WriteLine(" } ");


            SW.WriteLine(" { rank=same; ");
            foreach (NeuronGene neuron in genome.NeuronGeneList)
            {
                if (neuron.NeuronType == NeuronType.Input)
                {
                    SW.Write("N" + neuron.InnovationId + " ->");
                }
            }
            //Also the bias neuron on the same level
            SW.WriteLine("N0 [style=invis]");
            SW.WriteLine(" } ");

            SW.WriteLine("}");

            SW.Close();
        }
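        // Hypothetical usage sketch (not in the original source); file names are illustrative:
        //
        //   saveCPPNasDOT(bestGenome, "cppn.dot");
        //   // then render with Graphviz from a shell:  dot -Tpng cppn.dot -o cppn.png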
        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            //CHECK TO SEE IF HIDDEN NEURONS ARE OUTPUT NEURONS
            List<PointF> hiddenNeuronPositions = new List<PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList();//(int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);


            SubstrateEvolution se = new SubstrateEvolution();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                SubstrateEvolution.SAMPLE_WIDTH,
                SubstrateEvolution.SAMPLE_TRESHOLD,
                SubstrateEvolution.NEIGHBOR_LEVEL,
                SubstrateEvolution.INCREASE_RESSOLUTION_THRESHOLD,
                SubstrateEvolution.MIN_DISTANCE,
                SubstrateEvolution.CONNECTION_TRESHOLD, //0.4. ConnectionThreshold
                InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            float[] coordinates = new float[5];
            uint connectionCounter = (uint)connections.Count;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
                
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }


            uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;
            uint c1, c2;

            float delta = 0.15f;//2.0f / InputCount;
            float minDistance, dist, sourceX = -1, sourceY = -1, targetX = -1, targetY = -1;
            uint closestNodeIndex;
            //   int index, hiddenCount;

            //Connections to input nodes    
            // hiddenCount = 0;

            //TEST??????????????????????????????

            double tolerance = 0.1;
            //bool[] taken = new bool[hiddenNeuronGroup.NeuronPositions.Count];
            closestNodeIndex = 0;
            int ccc;

            //CONNECT FROM INPUT NODES
            // ConnectionGeneList addConnections = new ConnectionGeneList();
            targetID = 0;
            bool[] visited = new bool[neurons.Count];
            List<uint> nodeList = new List<uint>();
            bool[] connectedToInput = new bool[neurons.Count];

            //From hidden to output
            //taken = new bool[hiddenNeuronGroup.NeuronPositions.Count];
            // float targetX=-1.0f, targetY=-1.0f;
            targetID = 0;
            //  bool outputConnectedToInput;

            bool[] isOutput = new bool[neurons.Count];
            //float output, weight;
            //bool[] connectedToInput = new bool[neurons.Count];

            //bool connectToHidden;

            float totalConnectionDist = 0.0f;
            //Add connections between Hidden Neurons
            // addConnections.AddRange(connections);

            bool danglingConnection = true;

            while (danglingConnection)
            {
                bool[] hasIncomming = new bool[neurons.Count];

                foreach (ConnectionGene co in connections)
                {
                    //  if (co.SourceNeuronId != co.TargetNeuronId)
                    // {
                    hasIncomming[co.TargetNeuronId] = true;
                    // }
                }
                for (int i = 0; i < InputCount; i++)
                    hasIncomming[i] = true;

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    //  if (co.TargetNeuronId != co.SourceNeuronId)
                    //  {
                    if (co.TargetNeuronId != co.SourceNeuronId)  //neurons that only connect to themselves don't count
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                    //  }
                }

                //Keep  output neurons
                for (int i = 0; i < OutputCount; i++)
                    hasOutgoing[i + InputCount] = true;


                danglingConnection = false;
                //Check if there are still dangling connections
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncomming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return (!hasIncomming[m.SourceNeuronId]); });
                connections.RemoveAll(delegate(ConnectionGene m) { return (!hasOutgoing[m.TargetNeuronId]); });
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
       
            return gn;
        }
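        // Illustrative restatement, not part of the original source: the pruning pass above keeps
        // only connections whose source receives signal (or is an input) and whose target feeds
        // something (or is an output), repeating until a fixed point. The Link struct and method
        // name below are hypothetical; they exist only to show the invariant in isolation.
        private struct Link { public int Source; public int Target; }

        private static void PruneDanglingLinks(List<Link> links, int inputCount, int outputCount, int neuronCount)
        {
            bool removedSomething = true;
            while (removedSomething)
            {
                bool[] hasIncoming = new bool[neuronCount];
                bool[] hasOutgoing = new bool[neuronCount];

                foreach (Link l in links) hasIncoming[l.Target] = true;
                foreach (Link l in links) if (l.Source != l.Target) hasOutgoing[l.Source] = true;

                for (int i = 0; i < inputCount; i++) hasIncoming[i] = true;                  // inputs are always driven
                for (int i = 0; i < outputCount; i++) hasOutgoing[inputCount + i] = true;    // outputs never dangle

                int before = links.Count;
                links.RemoveAll(l => !hasIncoming[l.Source] || !hasOutgoing[l.Target]);
                removedSomething = links.Count != before;
            }
        }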
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, bool dirComm)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount) +
                numberOfAgents * (ReceiveCount * HiddenCount) + numberOfAgents * (HiddenCount * TransCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;
            uint totalTransCount = TransCount * numberOfAgents;
            uint totalReceiveCount = ReceiveCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden|receive|transmit
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents + ReceiveCount * numberOfAgents + TransCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }
            // set up the receive nodes
            for (uint a = 0; a < totalReceiveCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents, NeuronType.Receive, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the transmit nodes
            for (uint a = 0; a < totalTransCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents + ReceiveCount * numberOfAgents, NeuronType.Transmit, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount + totalReceiveCount + totalTransCount];

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                    case 3: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + ng.GlobalID + sourceCount; break; //Receive
                                    case 4: sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount; break; //Transmit
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                    case 3: targetID = totalInputCount + totalOutputCount + totalHiddenCount + (agent * ReceiveCount) + connectedNG.GlobalID + targetCout; break;
                                    case 4: targetID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID] = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                // Express a connection only when the CPPN's weight output clears the
                                // threshold (and, with LEO, when the link-expression output is positive).
                                if ((!useLeo || leo > 0.0) && Math.Abs(output) > threshold)
                                {
                                    // Rescale the surviving magnitude into [0, weightRange], keeping the sign.
                                    float weight = (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
                                    connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                }
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            //Add Direct Communication connections
            if (dirComm)
            {
                uint numConnected = ReceiveCount / TransCount;
                agent = 0;

                foreach (float stackCoordinate in stackCoordinates)
                {
                    SortedList<float, uint> closestAgents = new SortedList<float, uint>();
                    uint i = 0;
                    foreach (float otherCoordinate in stackCoordinates)
                    {
                        // Skip the agent's own coordinate but still advance the index,
                        // otherwise every agent after this one would be skipped as well.
                        if (i != agent)
                        {
                            float delta = Math.Abs(stackCoordinate - otherCoordinate);
                            closestAgents.Add(delta, i);
                        }
                        i++;
                    }
                    uint[] orderedAgents = new uint[numberOfAgents];
                    closestAgents.Values.CopyTo(orderedAgents, 0);
                    uint[] connectedAgents = new uint[numConnected];
                    for (uint j = 0; j < numConnected; j++)
                    {
                        connectedAgents[j] = orderedAgents[j];
                    }

                    foreach (NeuronGroup ng in neuronGroups)
                    {
                        if (ng.GroupType != 4) continue;
                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            uint sourceID = totalInputCount + totalOutputCount + totalHiddenCount + totalReceiveCount + (agent * TransCount) + ng.GlobalID + sourceCount;

                            foreach (uint connectedAgent in connectedAgents)
                            {
                                uint targetID = totalInputCount + totalOutputCount + totalHiddenCount + (connectedAgent * ReceiveCount) + (ng.GlobalID * numConnected) + agent;

                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, 1.0));
                            }

                            sourceCount++;
                        }
                    }

                    agent++;
                }

            }

            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
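        // A minimal usage sketch, not part of the original source: build a three-agent stacked
        // genome from an already-decoded CPPN. The method being called is the one defined above;
        // 'cppn', the stack coordinates and the flag values are illustrative assumptions.
        private NeatGenome.NeatGenome BuildThreeAgentStackExample(INetwork cppn)
        {
            // One z value per agent, spread across [-1, 1]; it is fed to the CPPN as coordinates[4].
            List<float> stack = new List<float> { -1.0f, 0.0f, 1.0f };

            // CPPN outputs read by the generator above: 0 = connection weight, 1 = target-node bias,
            // 2..6 = adaptive A, B, C, D and learning rate, 7 = modulatory signal
            // (output 2 doubles as the link-expression output when useLeo is set).
            // Flags: normalizeWeights, adaptiveNetwork, modulatoryNet, dirComm.
            return generateMultiGenomeStack(cppn, stack, true, false, false, false);
        }
        // Worked example of the weight mapping used above, assuming threshold = 0.2 and
        // weightRange = 3: a CPPN weight output of -0.6 clears the threshold, is rescaled by
        // (0.6 - 0.2) / (1 - 0.2) = 0.5, and is expressed as 0.5 * 3 * sign(-0.6) = -1.5;
        // any output with |output| <= 0.2 produces no connection at all.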
        /// <summary>
        /// Loads a genome from the specified XML file.
        /// </summary>
        public void loadGenome(String filename)
        {
            XmlDocument doc = new XmlDocument();
            doc.Load(filename);
            genome = XmlNeatGenomeReaderStatic.Read(doc);
            bestGenomeSoFar = genome;
        }
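        // Usage sketch with a hypothetical file name, assuming this loader sits on the same
        // experiment class as the seeded initializeEvolution overload shown later in this file:
        //
        //     loadGenome("seedGenome.xml");
        //     initializeEvolution(150, genome);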
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            List<PointF> hiddenNeuronPositions = new List<PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList();//(int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);

            EvolvableSubstrate se = new EvolvableSubstrate();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                HyperNEATParameters.initialRes,
                (float)HyperNEATParameters.varianceThreshold,
                (float)HyperNEATParameters.bandingThreshold,
                (int)HyperNEATParameters.ESIterations,
                (float)HyperNEATParameters.divisionThreshold,
                HyperNEATParameters.maximumRes,
                InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            float[] coordinates = new float[5];
            uint connectionCounter = (uint)connections.Count;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));

            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            // Iteratively prune dangling connections: every connection must be
            // reachable from an input and must eventually feed an output.

            bool danglingConnection = true;

            while (danglingConnection)
            {
                // Mark neurons that receive at least one connection; inputs always count as driven.
                bool[] hasIncoming = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    hasIncoming[co.TargetNeuronId] = true;
                }
                for (int i = 0; i < InputCount; i++)
                    hasIncoming[i] = true;

                // Mark neurons that feed another neuron; neurons that only connect to themselves don't count.
                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    if (co.TargetNeuronId != co.SourceNeuronId)
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                }

                // Keep output neurons even if nothing reads from them.
                for (int i = 0; i < OutputCount; i++)
                    hasOutgoing[i + InputCount] = true;

                // Check whether any dangling connections remain after this pass.
                danglingConnection = false;
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncoming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(m => !hasIncoming[m.SourceNeuronId]);
                connections.RemoveAll(m => !hasOutgoing[m.TargetNeuronId]);
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;

            return gn;
        }
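        // Worked example of the neuron-ID layout produced above (illustrative numbers, not from
        // the original source): with InputCount = 3, OutputCount = 2 and four hidden positions
        // discovered by the evolvable substrate, the genome ends up with
        //     inputs  -> IDs 0, 1, 2
        //     outputs -> IDs 3, 4
        //     hidden  -> IDs 5, 6, 7, 8
        // which matches the input|output|hidden ordering the constructor call above relies on;
        // no explicit bias-type neuron is added by this method.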
Ejemplo n.º 32
        public void initializeEvolution(int populationSize, NeatGenome seedGenome)
        {
            if (seedGenome == null)
            {
                initializeEvolution(populationSize);
                return;
            }
            if (logOutput != null)
                logOutput.Close();
            logOutput = new StreamWriter(outputFolder + "logfile.txt");
            IdGenerator idgen = new IdGeneratorFactory().CreateIdGenerator(seedGenome);
            ea = new EvolutionAlgorithm(new Population(idgen, GenomeFactory.CreateGenomeList(seedGenome, populationSize, neatParams, idgen)), populationEval, neatParams);
        }
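        // Usage sketch, not part of the original source. It assumes 'neatParams', 'populationEval'
        // and 'outputFolder' are already configured on this class, that the seed file name is
        // hypothetical, and that the generation-stepping call matches the SharpNEAT 1.x
        // EvolutionAlgorithm API used throughout this file:
        //
        //     XmlDocument doc = new XmlDocument();
        //     doc.Load("seedGenome.xml");
        //     initializeEvolution(150, XmlNeatGenomeReaderStatic.Read(doc));
        //     for (int gen = 0; gen < 500; gen++)
        //         ea.PerformOneGeneration();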
Ejemplo n.º 33
        static public ModularNetwork DecodeToModularNetwork(NeatGenome.NeatGenome g)
        {
            int inputCount = g.InputNeuronCount;
            int outputCount = g.OutputNeuronCount;
            int neuronCount = g.NeuronGeneList.Count;

            IActivationFunction[] activationFunctions = new IActivationFunction[neuronCount];
            float[] biasList = new float[neuronCount];

            Dictionary<uint, int> neuronLookup = new Dictionary<uint, int>(neuronCount);

            // Schrum: In case there are output neurons out of order
            g.NeuronGeneList.NeuronSortCheck();

            // Create an array of the activation functions for each non-module node in the genome.
            // Start with a bias node if there is one in the genome.
            // The genome's neuron list is assumed to be ordered by type, with the bias node appearing first.
            int neuronGeneIndex = 0;
            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++) {
                if (g.NeuronGeneList[neuronGeneIndex].NeuronType != NeuronType.Bias)
                    break;
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
            }
            int biasCount = neuronGeneIndex;
            // Schrum: debug
            //Console.WriteLine("start (after bias): " + g.GenomeId);

            // Schrum: Debugging
            //NeuronType expectedType = NeuronType.Input;

            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                // Schrum: Debug
                /*
                if (expectedType != g.NeuronGeneList[neuronGeneIndex].NeuronType)
                {
                    if (expectedType == NeuronType.Input && g.NeuronGeneList[neuronGeneIndex].NeuronType == NeuronType.Output)
                    {
                        expectedType = NeuronType.Output;
                    }
                    else if (expectedType == NeuronType.Output && g.NeuronGeneList[neuronGeneIndex].NeuronType == NeuronType.Hidden)
                    {
                        expectedType = NeuronType.Hidden;
                    }
                    else
                    {
                        // Error condition:
                        Console.WriteLine("Error with genome: " + g.GenomeId);

                        XmlDocument doc = new XmlDocument();
                        XmlGenomeWriterStatic.Write(doc, (SharpNeatLib.NeatGenome.NeatGenome)g);
                        FileInfo oFileInfo = new FileInfo("ProblemGenome.xml");
                        doc.Save(oFileInfo.FullName);

                        Environment.Exit(1);
                    }
                }
                */
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
                biasList[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].Bias;
            }

            // Create an array of the activation functions, inputs, and outputs for each module in the genome.
            ModulePacket[] modules = new ModulePacket[g.ModuleGeneList.Count];
            for (int i = g.ModuleGeneList.Count - 1; i >= 0; i--) {
                modules[i].function = g.ModuleGeneList[i].Function;
                // Must translate input and output IDs to array locations.
                modules[i].inputLocations = new int[g.ModuleGeneList[i].InputIds.Count];
                for (int j = g.ModuleGeneList[i].InputIds.Count - 1; j >= 0; j--) {
                    modules[i].inputLocations[j] = neuronLookup[g.ModuleGeneList[i].InputIds[j]];
                }
                modules[i].outputLocations = new int[g.ModuleGeneList[i].OutputIds.Count];
                for (int j = g.ModuleGeneList[i].OutputIds.Count - 1; j >= 0; j--) {
                    modules[i].outputLocations[j] = neuronLookup[g.ModuleGeneList[i].OutputIds[j]];
                }
            }

            // ConnectionGenes point to a neuron's innovation ID. Translate this ID to the neuron's index in the neuron array. 
            FloatFastConnection[] connections = new FloatFastConnection[g.ConnectionGeneList.Count];
            for (int connectionGeneIndex = g.ConnectionGeneList.Count - 1; connectionGeneIndex >= 0; connectionGeneIndex--) {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionGeneIndex];
                connections[connectionGeneIndex].sourceNeuronIdx = neuronLookup[connectionGene.SourceNeuronId];
                connections[connectionGeneIndex].targetNeuronIdx = neuronLookup[connectionGene.TargetNeuronId];
                connections[connectionGeneIndex].weight = (float)connectionGene.Weight;

                connections[connectionGeneIndex].learningRate = connectionGene.learningRate;
                connections[connectionGeneIndex].A = connectionGene.A;
                connections[connectionGeneIndex].B = connectionGene.B;
                connections[connectionGeneIndex].C = connectionGene.C;
                connections[connectionGeneIndex].D = connectionGene.D;
                connections[connectionGeneIndex].modConnection = connectionGene.modConnection;

            }

            ModularNetwork mn = new ModularNetwork(biasCount, inputCount, outputCount, g.OutputsPerPolicy, neuronCount, connections, biasList, activationFunctions, modules);
            if (g.networkAdaptable) mn.adaptable = true;
            if (g.networkModulatory) mn.modulatory = true;

            mn.genome = g;
            return mn;
        }
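        // A minimal activation sketch, not part of the original source: decode a genome with the
        // method above and query it once. The method name, 'sensorValues', and the single-output
        // assumption are hypothetical; the ClearSignals/SetInputSignals/RecursiveActivation/
        // GetOutputSignal sequence mirrors how the substrate code in this file drives its CPPNs.
        static float ActivateOnce(NeatGenome.NeatGenome genome, float[] sensorValues)
        {
            ModularNetwork net = DecodeToModularNetwork(genome);
            net.ClearSignals();                  // reset neuron activations
            net.SetInputSignals(sensorValues);   // one entry per input neuron
            net.RecursiveActivation();           // single full recursive pass, as used above
            return net.GetOutputSignal(0);       // read the first output neuron
        }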
        // MPS support on the Hive methods only
        #region Generate heterogenous genomes with z-stack

        // MPS NOT supported by this method
        private NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            float[] coordinates = new float[5];
            float output;
            uint connectionCounter = 0;
            float agentDelta = 2.0f / (numberOfAgents - 1);
            int iterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold = HyperNEATParameters.threshold;

            NeuronGeneList neurons;
            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {

                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {

                            //-----------------Get the bias of the source node
                            switch (ng.GroupType)
                            {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                            }
                            coordinates[0] = source.X; coordinates[1] = source.Y; coordinates[2] = 0.0f; coordinates[3] = 0.0f;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            network.RecursiveActivation();//network.MultipleSteps(iterations);

                            neurons[(int)sourceID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                    case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                             //Input
                                    case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                //Output
                                    case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;  //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                    case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;
                                    case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                network.RecursiveActivation();//network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A = network.GetOutputSignal(2);
                                    B = network.GetOutputSignal(3);
                                    C = network.GetOutputSignal(4);
                                    D = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo = network.GetOutputSignal(2);
                                }

                                // Express a connection only when the CPPN's weight output clears the
                                // threshold (and, with LEO, when the link-expression output is positive).
                                if ((!useLeo || leo > 0.0) && Math.Abs(output) > threshold)
                                {
                                    // Rescale the surviving magnitude into [0, weightRange], keeping the sign.
                                    float weight = (float)(((Math.Abs(output) - threshold) / (1 - threshold)) * weightRange * Math.Sign(output));
                                    connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates));
                                }
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
        private void ShowGenomeNetwork(NeatGenome genome)
        {

            NetworkControl networkControl = new NetworkControl();
            networkControl.Dock = DockStyle.Fill;
            panelNetWorkViewer.Controls.Clear();
            panelNetWorkViewer.Controls.Add(networkControl);

            /* create network model to draw the network */
            NetworkModel networkModel = GenomeDecoder.DecodeToNetworkModel(genome);
            GridLayoutManager layoutManager = new GridLayoutManager();
            layoutManager.Layout(networkModel, networkControl.Size);
            networkControl.NetworkModel = networkModel;
        }
Ejemplo n.º 36
		/// <summary>
		/// Copy constructor.
		/// </summary>
		/// <param name="copyFrom"></param>
		public NeatGenome(NeatGenome copyFrom, uint genomeId)
		{
			this.genomeId = genomeId;
            this.parent = copyFrom;

			// No need to loop the arrays to clone each element because NeuronGene and ConnectionGene are 
			// value data types (structs).
			neuronGeneList = new NeuronGeneList(copyFrom.neuronGeneList);
            moduleGeneList = new List<ModuleGene>(copyFrom.moduleGeneList);
			connectionGeneList = new ConnectionGeneList(copyFrom.connectionGeneList);

            //joel
            if (copyFrom.Behavior != null)
                Behavior = new SharpNeatLib.BehaviorType(copyFrom.Behavior);
            
			inputNeuronCount = copyFrom.inputNeuronCount;
            // Schrum: Removed (not used)
			//inputAndBiasNeuronCount = copyFrom.inputNeuronCount+1;
			outputNeuronCount = copyFrom.outputNeuronCount;
            // Schrum: removed (not used)
			//inputBiasOutputNeuronCount = copyFrom.inputBiasOutputNeuronCount;
			//inputBiasOutputNeuronCountMinus2 = copyFrom.inputBiasOutputNeuronCountMinus2;
            // Schrum: Added
            outputsPerPolicy = copyFrom.outputsPerPolicy;

			Debug.Assert(connectionGeneList.IsSorted(), "ConnectionGeneList is not sorted by innovation ID");
		}
Ejemplo n.º 37
        /// <summary>
        /// Initializes the EA with an initial population generated from a single seed genome.
        /// </summary>
        public void initializeEvolution(int populationSize, NeatGenome seedGenome)
        {
            if (seedGenome == null)
            {
                initializeEvolution(populationSize);
                return;
            }

            LogOutput = Logging ? new StreamWriter(Path.Combine(OutputFolder, "log.txt")) : null;
            FinalPositionOutput = FinalPositionLogging ? new StreamWriter(Path.Combine(OutputFolder,"final-position.txt")) : null;
            ArchiveModificationOutput = FinalPositionLogging ? new StreamWriter(Path.Combine(OutputFolder, "archive-mods.txt")) : null;
            ComplexityOutput = new StreamWriter(Path.Combine(OutputFolder, "complexity.txt"));
            ComplexityOutput.WriteLine("avg,stdev,min,max");
            if (FinalPositionLogging)
            {
                FinalPositionOutput.WriteLine("x,y");
                ArchiveModificationOutput.WriteLine("ID,action,time,x,y");
            }
            IdGenerator idgen = new IdGeneratorFactory().CreateIdGenerator(seedGenome);
            EA = new EvolutionAlgorithm(new Population(idgen, GenomeFactory.CreateGenomeList(seedGenome, populationSize, experiment.DefaultNeatParameters, idgen)), experiment.PopulationEvaluator, experiment.DefaultNeatParameters);
            EA.outputFolder = OutputFolder;
            EA.neatBrain = NEATBrain;
        }
Ejemplo n.º 38
		/// <summary>
		/// Asexual reproduction with built in mutation.
		/// </summary>
		/// <returns></returns>
		public override IGenome CreateOffspring_Asexual(EvolutionAlgorithm ea)
		{
			// Make an exact copy of this Genome.
			NeatGenome offspring = new NeatGenome(this, ea.NextGenomeId);

			// Mutate the new genome.
			offspring.Mutate(ea);
			return offspring;
		}
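		// Usage sketch (illustrative only, not from the original source): inside a generation
		// step, an asexual offspring is produced from a selected parent and receives a fresh
		// genome id from the EvolutionAlgorithm. The selection line is hypothetical.
		//
		//     NeatGenome parent = (NeatGenome)population.GenomeList[selectedIndex];
		//     NeatGenome child = (NeatGenome)parent.CreateOffspring_Asexual(ea);
		//     // 'child' is a mutated copy; the copy constructor above clones the neuron and
		//     // connection gene lists rather than sharing them with the parent.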