public GeomPlayerSubstrate(bool useBias,
                                   IActivationFunction activationFunction, int cppnOutIndex)
        {
            // Fix: the useBias parameter was previously ignored, leaving m_useBias at
            // its default even though it is read below when sizing and populating the
            // neuron list (sibling substrates, e.g. TwoLayerSandwichSubstrate, store it).
            m_useBias = useBias;
            // NOTE(review): cppnOutIndex is accepted but not stored here; sibling
            // substrates keep it in m_cppnOutIndex — confirm whether this one needs it.

            // Five sample points per axis across [-1, 1], centered in their cells.
            m_delta = 2.0f / 5.0f;
            m_start = -1.0f + (m_delta * 0.5f);

            m_activationFunction = activationFunction;

            // SharpNEAT expects neuron ordering: bias | input | output.
            m_neurons = new NeuronGeneList(m_inputsCount + m_outputsCount + (m_useBias ? 1 : 0));

            uint curNeurId = 0;

            // Bias and input genes carry no activation of their own ("NullFn").
            if (m_useBias)
            {
                m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Bias, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            for (int i = 0; i < m_inputsCount; i++)
            {
                m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Output genes use the caller-supplied activation function.
            for (int i = 0; i < m_outputsCount; i++)
            {
                m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Output, m_activationFunction));
            }
        }
Пример #2
0
        // ControllerSubstrate constructor.
        // Lays out bias/input/output/hidden neuron genes for the controller network;
        // rgbSize is the side length of the square RGB input grid.
        public ControllerSubstrate(uint input, uint output, uint hidden, IActivationFunction function, uint rgbSize = 10) : base(input, output, hidden, function)
        {
            rgbOneDimension  = rgbSize;
            colorArraySize   = rgbOneDimension * rgbOneDimension;
            headingArraySize = 8;

            // Discard the base class's layout; this substrate rebuilds the list itself.
            neurons.Clear();

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            // Fix: include biasCount in the initial capacity — it was previously
            // omitted, so adding the bias genes below forced a list regrowth.
            neurons = new NeuronGeneList((int)(biasCount + inputCount + outputCount + hiddenCount));

            // set up the bias nodes (layer coordinate 0.0)
            for (uint a = 0; a < biasCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Bias, ActivationFunctionFactory.GetActivationFunction("Linear"), 0.0f));
            }

            // set up the input nodes (layer coordinate 0.0)
            for (uint a = 0; a < inputCount; a++)
            {
                neurons.Add(new NeuronGene(a + biasCount, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("Linear"), 0.0f));
            }

            // set up the output nodes (layer coordinate 1.0)
            for (uint a = 0; a < outputCount; a++)
            {
                neurons.Add(new NeuronGene(a + biasCount + inputCount, NeuronType.Output, ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid"), 1.0f));
            }

            // set up the hidden nodes (layer coordinate 0.5, between inputs and outputs)
            for (uint a = 0; a < hiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + biasCount + inputCount + outputCount, NeuronType.Hidden, ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid"), 0.5f));
            }
        }
Пример #3
0
        // Substrate whose input plane is a rows x cols grid plus one extra
        // ball-owner input neuron; the output plane mirrors the grid.
        public BallOwnerAwareSubstrate(int rows, int cols, IActivationFunction activationFunction)
        {
            m_rows = rows;
            m_cols = cols;

            // Grid cells are spaced across [-1, 1] on each axis.
            m_rowDelta = 2.0f / m_rows;
            m_colDelta = 2.0f / m_cols;

            m_activationFunction = activationFunction;

            // Fix: the initial capacity now accounts for the extra ball-owner input
            // neuron added below (it was previously 2 * rows * cols + bias, one
            // short, forcing a list regrowth).
            // NOTE(review): m_useBias is read but not set here — presumably a field
            // default or set elsewhere; confirm.
            m_neurons = new NeuronGeneList(2 * (m_rows * m_cols) + 1 + (m_useBias ? 1 : 0));

            uint curNeurId = 0;

            if (m_useBias)
            {
                m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Bias, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Input grid neurons.
            for (int i = 0, len = m_rows * m_cols; i < len; i++)
            {
                m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Extra input neuron for the ball-owner signal.
            m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));

            // Output grid neurons, using the caller-supplied activation function.
            for (int i = 0, len = m_rows * m_cols; i < len; i++)
            {
                m_neurons.Add(new NeuronGene(curNeurId++, NeuronType.Output, m_activationFunction));
            }
        }
        /// <summary>
        /// Performs multiple generations of the evolutionary algorithm.
        /// </summary>
        /// <param name="generations">Number of generations to run before writing final output.</param>
        public void evolve(int generations)
        {
            for (int j = 0; j < generations; j++)
            {
                oneGeneration(j);
            }
            LogOutput.Close();

            // Persist the champion genome of the whole run.
            XmlDoc = new XmlDocument();
            XmlGenomeWriterStatic.Write(XmlDoc, (NeatGenome)EA.BestGenome, ActivationFunctionFactory.GetActivationFunction("NullFn"));
            OutputFileInfo = new FileInfo(Path.Combine(OutputFolder, "bestGenome.xml"));
            XmlDoc.Save(OutputFileInfo.FullName);


            //if doing novelty search, write out archive
            if (experiment.DefaultNeatParameters.noveltySearch)
            {
                XmlDocument archiveout = new XmlDocument();

                XmlPopulationWriter.WriteGenomeList(archiveout, EA.noveltyFixed.archive);
                OutputFileInfo = new FileInfo(Path.Combine(OutputFolder, "archive.xml"));
                archiveout.Save(OutputFileInfo.FullName);
            }

            // dump the MapElites grid contents (empty if we aren't doing ME and we aren't tracking an ME-style grid for some other algorithm)
            XmlDocument gridout = new XmlDocument();

            XmlPopulationWriter.WriteGenomeList(gridout, EA.meDumpGrid());
            OutputFileInfo = new FileInfo(Path.Combine(OutputFolder, "finalgrid.xml"));
            gridout.Save(OutputFileInfo.FullName);
        }
Пример #5
0
        // Two-plane "sandwich" substrate: a rows x cols input plane feeding a
        // rows x cols output plane, with an optional bias neuron at the front.
        public TwoLayerSandwichSubstrate(int rows, int cols, bool useBias,
                                         IActivationFunction activationFunction, int cppnOutIndex)
        {
            m_rows         = rows;
            m_cols         = cols;
            m_useBias      = useBias;
            m_cppnOutIndex = cppnOutIndex;

            // Grid cells are spaced across [-1, 1] on each axis.
            m_rowDelta = 2.0f / m_rows;
            m_colDelta = 2.0f / m_cols;

            m_activationFunction = activationFunction;

            int planeSize = m_rows * m_cols;

            // Two planes plus at most one bias neuron.
            m_neurons = new NeuronGeneList(2 * planeSize + (m_useBias ? 1 : 0));

            uint nextId = 0;

            // Bias first (SharpNEAT ordering: bias|input|output).
            if (m_useBias)
            {
                m_neurons.Add(new NeuronGene(nextId++, NeuronType.Bias, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Input plane: genes carry no activation of their own.
            for (int cell = 0; cell < planeSize; cell++)
            {
                m_neurons.Add(new NeuronGene(nextId++, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Output plane: genes use the caller-supplied activation function.
            for (int cell = 0; cell < planeSize; cell++)
            {
                m_neurons.Add(new NeuronGene(nextId++, NeuronType.Output, m_activationFunction));
            }
        }
Пример #6
0
        public void ZeroInputTest()
        {
            // The Sum activation should pass a zero impulse through unchanged.
            var sum = ActivationFunctionFactory.Get(EActivationFunctionType.Sum);

            Assert.AreEqual(0, sum.Impuls(0));
        }
        // Reconstructs a Neuron from its persisted XML element
        // (attributes: id, type, activationFunction).
        private static Neuron ReadNeuron(XmlElement xmlNeuron)
        {
            string idText = XmlUtilities.GetAttributeValue(xmlNeuron, "id");
            string fnName = XmlUtilities.GetAttributeValue(xmlNeuron, "activationFunction");

            NeuronType kind = XmlUtilities.GetNeuronType(XmlUtilities.GetAttributeValue(xmlNeuron, "type"));

            return new Neuron(ActivationFunctionFactory.GetActivationFunction(fnName), kind, uint.Parse(idText));
        }
Пример #8
0
        public void NegativeInputTest()
        {
            // The Sum activation should pass a negative impulse through unchanged.
            var sum = ActivationFunctionFactory.Get(EActivationFunctionType.Sum);

            Assert.AreEqual(-1, sum.Impuls(-1));
        }
        // Reconstructs a NeuronGene from its persisted XML element
        // (attributes: id, type, activationFunction, layer).
        private static NeuronGene ReadNeuronGene(XmlElement xmlNeuronGene)
        {
            // Fix: parse with the invariant culture so genome files round-trip on
            // any machine — under locales that use ',' as the decimal separator,
            // culture-sensitive double.Parse would reject the "layer" attribute.
            uint       id           = uint.Parse(XmlUtilities.GetAttributeValue(xmlNeuronGene, "id"), System.Globalization.CultureInfo.InvariantCulture);
            NeuronType neuronType   = XmlUtilities.GetNeuronType(XmlUtilities.GetAttributeValue(xmlNeuronGene, "type"));
            string     activationFn = XmlUtilities.GetAttributeValue(xmlNeuronGene, "activationFunction");
            double     layer        = double.Parse(XmlUtilities.GetAttributeValue(xmlNeuronGene, "layer"), System.Globalization.CultureInfo.InvariantCulture);

            return(new NeuronGene(null, id, layer, neuronType, ActivationFunctionFactory.GetActivationFunction(activationFn)));
        }
Пример #10
0
        // Overrides the substrate activation function only when the config file
        // supplied a "substrateactivationfunction" entry.
        private static void setSubstrateActivationFunction()
        {
            string name = getParameter("substrateactivationfunction");

            if (name == null)
            {
                return;
            }

            substrateActivationFunction = ActivationFunctionFactory.GetActivationFunction(name);
        }
Пример #11
0
        // Reconstructs a NeuronGene from its persisted XML element
        // (attributes: id, layer, type, activationFunction).
        private static NeuronGene ReadNeuronGene(XmlElement xmlNeuronGene)
        {
            // Fix: parse with the invariant culture so genome files round-trip on
            // any machine — culture-sensitive Convert.ToDouble/uint.Parse would
            // otherwise honor the current locale's decimal separator.
            uint       id           = uint.Parse(XmlUtilities.GetAttributeValue(xmlNeuronGene, "id"), System.Globalization.CultureInfo.InvariantCulture);
            float      layer        = (float)Convert.ToDouble(XmlUtilities.GetAttributeValue(xmlNeuronGene, "layer"), System.Globalization.CultureInfo.InvariantCulture);
            NeuronType neuronType   = XmlUtilities.GetNeuronType(XmlUtilities.GetAttributeValue(xmlNeuronGene, "type"));
            string     activationFn = XmlUtilities.GetAttributeValue(xmlNeuronGene, "activationFunction");

            return(new NeuronGene(id, neuronType, ActivationFunctionFactory.GetActivationFunction(activationFn), layer));
        }
Пример #12
0
        // Creates a neuron at the given index with the given bias weight and an
        // empty input-synapse list.
        public Neuron(int index, float biasWeight)
        {
            // Sigmoid is the default activation; callers may replace it afterwards.
            ActivationFunction = ActivationFunctionFactory.CreateDefaultFunction();

            Index      = index;
            BiasWeight = biasWeight;
            Inputs     = new List<Synapse>();
        }
Пример #13
0
        /// <summary>
        /// Constructor that manually generates a NN controller (used mostly for debugging).
        /// Builds a fixed genome from a PlanterSubstrate and decodes it into this
        /// creature's controller network.
        /// </summary>
        /// <param name="bodyTexture">Texture used to draw the creature's body.</param>
        /// <param name="x">X coordinate of initial position.</param>
        /// <param name="y">Y coordinate of initial position.</param>
        /// <param name="headingInRadians">Initial heading, in radians.</param>
        /// <param name="sim">Owning simulator instance.</param>
        /// <param name="drawSensorField_in">Whether to render the sensor field.</param>
        /// <param name="trackPlanting_in">Whether planting events are tracked.</param>
        /// <param name="numSensors">Number of sensor components. Each agent has one sensor composed of at least 3 components.</param>
        /// <param name="freezeAfterPlanting_in">Whether the creature freezes after planting.</param>
        public NNControlledCreature(Texture2D bodyTexture, float x, float y, float headingInRadians, Simulator sim, bool drawSensorField_in, bool trackPlanting_in, int numSensors, bool freezeAfterPlanting_in)
            : base(bodyTexture, x, y, headingInRadians, sim, drawSensorField_in, trackPlanting_in, numSensors)
        {
            // NOTE(review): 308/4/108 are hard-coded substrate dimensions —
            // presumably input/hidden/output-related sizes for the planter network;
            // confirm against PlanterSubstrate's constructor.
            PlanterSubstrate substrate = new PlanterSubstrate(308, 4, 108, new BipolarSigmoid());
            NeatGenome       genome    = substrate.generateGenome();

            Controller          = genome.Decode(ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid"));
            freezeAfterPlanting = freezeAfterPlanting_in;
            frozen = false;
        }
Пример #14
0
        /// <summary>
        /// Allocates all weight and bias arrays for a fully-connected feed-forward
        /// network with the given layer sizes and resolves the activation function.
        /// </summary>
        /// <param name="parameters">Layer sizes and activation function type.</param>
        public NeuralNet(NeuralNetParameters parameters)
        {
            // "wages" throughout this class appears to mean "weights" (likely a
            // translation artifact); kept as-is since the fields are used elsewhere.
            wagesBetweenInputAndFirstHiddenLayer = ArrayAllocatorUtils.Allocate <double>(parameters.InputLayerSize, parameters.HiddenLayerSize);
            // N hidden layers need N-1 hidden-to-hidden weight matrices.
            wagesBetweenHiddenLayers             = ArrayAllocatorUtils.Allocate <double>(parameters.NumberOfHiddenLayers - 1, parameters.HiddenLayerSize, parameters.HiddenLayerSize);
            wagesBetweenLastHiddenAndOutputLayer = ArrayAllocatorUtils.Allocate <double>(parameters.HiddenLayerSize, parameters.OutputLayerSize);
            biasesInHiddenLayers = ArrayAllocatorUtils.Allocate <double>(parameters.NumberOfHiddenLayers, parameters.HiddenLayerSize);
            biasesInOutputLayer  = ArrayAllocatorUtils.Allocate <double>(parameters.OutputLayerSize);

            this.activationFunctionType = parameters.ActivationFunctionType;
            activationFunction          = ActivationFunctionFactory.Get(activationFunctionType);
        }
Пример #15
0
        /// <summary>
        /// Base substrate constructor: records layer sizes, computes per-layer
        /// coordinate spacing, and lays out neuron genes in the order SharpNEAT
        /// requires (bias|input|output|hidden).
        /// </summary>
        public Substrate(uint biasCount, uint inputCount, uint outputCount, uint hiddenCount, IActivationFunction function)
        {
            this.biasCount          = biasCount;
            this.inputCount         = inputCount;
            this.outputCount        = outputCount;
            this.hiddenCount        = hiddenCount;
            this.activationFunction = function;

            weightRange = HyperNEATParameters.weightRange;
            threshold   = HyperNEATParameters.threshold;

            // Neuron coordinates span [-1, 1] per layer; guard empty layers against
            // division by zero.
            if (inputCount != 0)
            {
                inputDelta = 2.0f / (inputCount);
            }
            if (hiddenCount != 0)
            {
                hiddenDelta = 2.0f / (hiddenCount);
            }
            if (outputCount != 0)
            {
                outputDelta = 2.0f / (outputCount);
            }

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            // Fix: include biasCount in the initial capacity — it was previously
            // omitted, so adding the bias genes below forced a list regrowth.
            neurons = new NeuronGeneList((int)(biasCount + inputCount + outputCount + hiddenCount));

            // set up the bias nodes ("NullFn": bias/input genes carry no activation)
            for (uint a = 0; a < biasCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Bias, ActivationFunctionFactory.GetActivationFunction("NullFn"), 0.0f));
            }

            // set up the input nodes
            for (uint a = 0; a < inputCount; a++)
            {
                neurons.Add(new NeuronGene(a + biasCount, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn"), 0.0f));
            }

            // set up the output nodes (layer coordinate 1.0)
            for (uint a = 0; a < outputCount; a++)
            {
                neurons.Add(new NeuronGene(a + biasCount + inputCount, NeuronType.Output, activationFunction, 1.0f));
            }

            // set up the hidden nodes (layer coordinate 0.5, between inputs and outputs)
            for (uint a = 0; a < hiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + biasCount + inputCount + outputCount, NeuronType.Hidden, activationFunction, 0.5f));
            }
        }
Пример #16
0
        /// <summary>
        /// Runs one generation of the evolutionary algorithm, saving a new champion
        /// genome whenever fitness improves and periodically writing the novelty
        /// archive / population to XML.
        /// </summary>
        /// <param name="currentGeneration">Zero-based index of the generation being run.</param>
        public void oneGeneration(int currentGeneration)
        {
            DateTime dt = DateTime.Now;

            ea.PerformOneGeneration();
            // New best-so-far fitness: snapshot the champion to bestGenome<N>.xml.
            if (ea.BestGenome.RealFitness > maxFitness)
            {
                simExperiment.bestGenomeSoFar = (NeatGenome)ea.BestGenome;
                maxFitness = ea.BestGenome.RealFitness;
                doc        = new XmlDocument();
                XmlGenomeWriterStatic.Write(doc, (NeatGenome)ea.BestGenome);
                // NOTE(review): plain concatenation assumes outputFolder ends with a
                // path separator — confirm against how outputFolder is assigned.
                oFileInfo = new FileInfo(outputFolder + "bestGenome" + currentGeneration.ToString() + ".xml");
                doc.Save(oFileInfo.FullName);
            }
            //Console.WriteLine(ea.Generation.ToString() + " " + ea.BestGenome.RealFitness + " "  + ea.Population.GenomeList.Count + " " + (DateTime.Now.Subtract(dt)));
            // Schrum: Changed this to include fitness values from each environment: Mainly for FourTasks
            Console.WriteLine(ea.Generation.ToString() + " " + ea.BestGenome.RealFitness + " " + ea.Population.GenomeList.Count + " (" + string.Join(",", ea.BestGenome.Behavior.objectives) + ") " + (DateTime.Now.Subtract(dt)) + " " + ea.BestGenome.Behavior.modules + " " + ea.BestGenome.Behavior.cppnLinks + " " + ea.BestGenome.Behavior.substrateLinks);
            // Archive/population XML dumps happen only every gen_mult generations.
            int gen_mult = 200;

            if (logging)
            {
                if (experiment.DefaultNeatParameters.noveltySearch && currentGeneration % gen_mult == 0)
                {
                    XmlDocument archiveout = new XmlDocument();

                    XmlPopulationWriter.WriteGenomeList(archiveout, ea.noveltyFixed.archive);
                    oFileInfo = new FileInfo(outputFolder + "archive.xml");
                    archiveout.Save(oFileInfo.FullName);
                }

                if ((experiment.DefaultNeatParameters.noveltySearch || experiment.DefaultNeatParameters.multiobjective) && currentGeneration % gen_mult == 0)
                {
                    XmlDocument popout = new XmlDocument();
                    // Multiobjective runs serialize their own population list; otherwise
                    // write the EA's population with the "NullFn" placeholder activation.
                    if (!experiment.DefaultNeatParameters.multiobjective)
                    {
                        XmlPopulationWriter.Write(popout, ea.Population, ActivationFunctionFactory.GetActivationFunction("NullFn"));
                    }
                    else
                    {
                        XmlPopulationWriter.WriteGenomeList(popout, ea.multiobjective.population);
                    }

                    oFileInfo = new FileInfo(outputFolder + "population" + currentGeneration.ToString() + ".xml");
                    popout.Save(oFileInfo.FullName);
                }
                // Schrum: Added contents of objective array to log so individual environment scores can be seen in FourTasks domain
                // Also always print modules, cppn links, and substrate links
                logOutput.WriteLine(ea.Generation.ToString() + " " + (maxFitness).ToString() + " " + string.Join(" ", ea.BestGenome.Behavior.objectives) + " " + ea.BestGenome.Behavior.modules + " " + ea.BestGenome.Behavior.cppnLinks + " " + ea.BestGenome.Behavior.substrateLinks);
            }
        }
Пример #17
0
        /// <summary>
        /// Produces a child network by element-wise crossover of this network's
        /// weights and biases with <paramref name="other"/>'s; the child's
        /// activation function type is inherited from either parent with equal
        /// probability.
        /// </summary>
        /// <param name="other">The second parent network.</param>
        /// <returns>A newly allocated child network.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="other"/> is null.</exception>
        public NeuralNet Crossover(NeuralNet other)
        {
            // Fix: the parameter is already statically typed NeuralNet, so the old
            // "other is NeuralNet == false" check (and the redundant 'as' cast)
            // could only ever reject null. ArgumentNullException derives from
            // ArgumentException, so existing catch blocks keep working.
            if (other is null)
            {
                throw new ArgumentNullException(nameof(other));
            }

            // Child starts with this parent's topology and activation type.
            // NOTE(review): HiddenLayerSize is taken from
            // wagesBetweenLastHiddenAndOutputLayer[0].Length, whose second dimension
            // is allocated as OutputLayerSize in the constructor — confirm intent.
            var child = new NeuralNet(
                new NeuralNetParameters()
            {
                ActivationFunctionType = activationFunctionType,
                InputLayerSize         = wagesBetweenInputAndFirstHiddenLayer.Length,
                HiddenLayerSize        = wagesBetweenLastHiddenAndOutputLayer[0].Length,
                NumberOfHiddenLayers   = wagesBetweenHiddenLayers.Length + 1,
                OutputLayerSize        = biasesInOutputLayer.Length
            });

            // Mix each weight/bias structure element-wise with the other parent.
            child.wagesBetweenInputAndFirstHiddenLayer =
                CrossoverUtils.Crossover(wagesBetweenInputAndFirstHiddenLayer, other.wagesBetweenInputAndFirstHiddenLayer);

            for (int i = 0; i < wagesBetweenHiddenLayers.Length; i++)
            {
                child.wagesBetweenHiddenLayers[i] =
                    CrossoverUtils.Crossover(wagesBetweenHiddenLayers[i], other.wagesBetweenHiddenLayers[i]);
            }
            for (int i = 0; i < biasesInHiddenLayers.Length; i++)
            {
                child.biasesInHiddenLayers[i] =
                    CrossoverUtils.Crossover(biasesInHiddenLayers[i], other.biasesInHiddenLayers[i]);
            }

            child.wagesBetweenLastHiddenAndOutputLayer =
                CrossoverUtils.Crossover(wagesBetweenLastHiddenAndOutputLayer, other.wagesBetweenLastHiddenAndOutputLayer);

            child.biasesInOutputLayer =
                CrossoverUtils.Crossover(biasesInOutputLayer, other.biasesInOutputLayer);

            // Inherit the activation type from either parent with equal probability,
            // then resolve the concrete activation function once.
            if (StaticRandom.R.NextDouble() < 0.5)
            {
                child.activationFunctionType = other.activationFunctionType;
            }

            child.activationFunction = ActivationFunctionFactory.Get(child.activationFunctionType);

            return(child);
        }
Пример #18
0
        /// <summary>
        /// Runs one generation of the evolutionary algorithm, saving a new champion
        /// genome whenever fitness improves and periodically writing the novelty
        /// archive / population to XML.
        /// </summary>
        /// <param name="currentGeneration">Zero-based index of the generation being run.</param>
        public void oneGeneration(int currentGeneration)
        {
            DateTime dt = DateTime.Now;

            ea.PerformOneGeneration();
            // New best-so-far fitness: snapshot the champion to bestGenome<N>.xml.
            if (ea.BestGenome.RealFitness > maxFitness)
            {
                //simExperiment.bestGenomeSoFar = (NeatGenome)ea.BestGenome;
                maxFitness = ea.BestGenome.RealFitness;
                doc        = new XmlDocument();
                XmlGenomeWriterStatic.Write(doc, (NeatGenome)ea.BestGenome);
                // NOTE(review): plain concatenation assumes outputFolder ends with a
                // path separator — confirm against how outputFolder is assigned.
                oFileInfo = new FileInfo(outputFolder + "bestGenome" + currentGeneration.ToString() + ".xml");
                doc.Save(oFileInfo.FullName);
            }
            Console.WriteLine(ea.Generation.ToString() + " " + ea.BestGenome.RealFitness + " " + ea.Population.GenomeList.Count + " " + (DateTime.Now.Subtract(dt)));
            // Archive/population XML dumps happen only every gen_mult generations.
            int gen_mult = 200;

            if (logging)
            {
                if (neatParams.noveltySearch && currentGeneration % gen_mult == 0)
                {
                    XmlDocument archiveout = new XmlDocument();

                    XmlPopulationWriter.WriteGenomeList(archiveout, ea.noveltyFixed.archive);
                    oFileInfo = new FileInfo(outputFolder + "archive.xml");
                    archiveout.Save(oFileInfo.FullName);
                }

                if ((neatParams.noveltySearch || neatParams.multiobjective) && currentGeneration % gen_mult == 0)
                {
                    XmlDocument popout = new XmlDocument();
                    // Multiobjective runs serialize their own population list; otherwise
                    // write the EA's population with the "NullFn" placeholder activation.
                    if (!neatParams.multiobjective)
                    {
                        XmlPopulationWriter.Write(popout, ea.Population, ActivationFunctionFactory.GetActivationFunction("NullFn"));
                    }
                    else
                    {
                        XmlPopulationWriter.WriteGenomeList(popout, ea.multiobjective.population);
                    }

                    oFileInfo = new FileInfo(outputFolder + "population" + currentGeneration.ToString() + ".xml");
                    popout.Save(oFileInfo.FullName);
                }

                logOutput.WriteLine(ea.Generation.ToString() + " " + (maxFitness).ToString());
            }
        }
        /// <summary>
        /// Loads "params.txt": lines between StartActivationFunctions and
        /// EndActivationFunctions are "name probability" pairs; all other lines are
        /// generic "key value" parameters. Falls back to defaults on any error.
        /// </summary>
        public static void loadParameterFile()
        {
            try
            {
                // Fix: dispose the reader deterministically — the StreamReader was
                // previously never closed, leaking the file handle (and leaking it
                // permanently on a mid-file parse exception).
                using (System.IO.StreamReader input = new System.IO.StreamReader(@"params.txt"))
                {
                    string[] line;
                    double   probability;
                    bool     readingActivation = false;
                    while (!input.EndOfStream)
                    {
                        line = input.ReadLine().Split(' ');
                        if (line[0].Equals("StartActivationFunctions"))
                        {
                            readingActivation = true;
                        }
                        else if (line[0].Equals("EndActivationFunctions"))
                        {
                            readingActivation = false;
                        }
                        else if (readingActivation)
                        {
                            // TryParse leaves probability at 0.0 on malformed input;
                            // the entry is still added (preserves existing behavior).
                            double.TryParse(line[1], out probability);
                            activationFunctions.Add(line[0], probability);
                        }
                        else
                        {
                            parameters.Add(line[0].ToLower(), line[1]);
                        }
                    }
                }
            }
            catch (Exception e)
            {
                System.Console.WriteLine(e.Message);
                System.Console.WriteLine("Error reading config file, check file location and formation. Using default parameters");
                loadDefaultParameters();
            }

            // Apply whatever was read (or the defaults) to the global configuration.
            ActivationFunctionFactory.setProbabilities(activationFunctions);

            setParameterDouble("threshold", ref threshold);
            setParameterDouble("weightrange", ref weightRange);
            setParameterInt("numberofthreads", ref numThreads);
            setSubstrateActivationFunction();
        }
Пример #20
0
        // Multi-plane "sandwich" substrate: numLayers planes of rows x cols neurons
        // (input, output, then numLayers - 2 hidden planes), with at most one
        // shared bias neuron at the front of the list.
        public MultiLayerSandwichSubstrate(int rows, int cols, int numLayers, bool useBias, IActivationFunction activationFunction)
        {
            m_rows      = rows;
            m_cols      = cols;
            m_numLayers = numLayers;
            m_useBias   = useBias;

            // Grid cells are spaced across [-1, 1] on each axis.
            m_rowDelta = 2.0f / m_rows;
            m_colDelta = 2.0f / m_cols;

            m_activationFunction = activationFunction;

            int planeSize = m_rows * m_cols;

            // One plane per layer, plus the single optional bias neuron.
            m_neurons = new NeuronGeneList(m_numLayers * planeSize + (m_useBias ? 1 : 0));

            uint nextId = 0;

            // Bias first (SharpNEAT ordering: bias|input|output|hidden).
            if (m_useBias)
            {
                m_neurons.Add(new NeuronGene(nextId++, NeuronType.Bias, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Input plane: genes carry no activation of their own.
            for (int cell = 0; cell < planeSize; cell++)
            {
                m_neurons.Add(new NeuronGene(nextId++, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // Output plane: genes use the caller-supplied activation function.
            for (int cell = 0; cell < planeSize; cell++)
            {
                m_neurons.Add(new NeuronGene(nextId++, NeuronType.Output, m_activationFunction));
            }

            // Remaining numLayers - 2 planes are hidden layers.
            for (int layer = 0; layer < m_numLayers - 2; layer++)
            {
                for (int cell = 0; cell < planeSize; cell++)
                {
                    m_neurons.Add(new NeuronGene(nextId++, NeuronType.Hidden, m_activationFunction));
                }
            }
        }
Пример #21
0
        /// <summary>
        /// Runs the evolutionary algorithm for the given number of generations,
        /// then writes the best genome (and, for novelty search, the archive) to XML.
        /// </summary>
        /// <param name="generations">Number of generations to run.</param>
        public void evolve(int generations)
        {
            for (int j = 0; j < generations; j++)
            {
                oneGeneration(j);
            }
            logOutput.Close();

            // Persist the champion genome of the whole run.
            doc = new XmlDocument();
            XmlGenomeWriterStatic.Write(doc, (NeatGenome)ea.BestGenome, ActivationFunctionFactory.GetActivationFunction("NullFn"));
            // NOTE(review): plain concatenation assumes outputFolder ends with a
            // path separator — confirm against how outputFolder is assigned.
            oFileInfo = new FileInfo(outputFolder + "bestGenome.xml");
            doc.Save(oFileInfo.FullName);

            //if doing novelty search, write out archive
            if (neatParams.noveltySearch)
            {
                XmlDocument archiveout = new XmlDocument();

                XmlPopulationWriter.WriteGenomeList(archiveout, ea.noveltyFixed.archive);
                oFileInfo = new FileInfo(outputFolder + "archive.xml");
                archiveout.Save(oFileInfo.FullName);
            }
        }
Пример #22
0
        protected override void Initialize()
        {
            int divider = (int)Math.Ceiling((double)(256.0 / numDimsPerRGBAxis));

            // Iterate through each snapshot (each subdirectory of logsfolder)
            List <String> dirEnum = new List <String>(Directory.EnumerateDirectories(logsFolder));

            foreach (string snapshotNum in dirEnum)
            {
                if (!analyzePlantingRatesOnly)
                {
                    // First, get bins for single snapshot of the world
                    activatedNodes          = new List <GraphNode>();
                    disjointSets            = new List <List <GraphNode> >();
                    numNonTransparentPixels = 0;

                    // Populate the components list
                    if (File.Exists(snapshotNum + "\\Components.txt"))
                    {
                        using (System.IO.StreamReader reader = new System.IO.StreamReader(snapshotNum + "\\Components.txt"))
                        {
                            // The first line is the current creature's ID; skip it
                            reader.ReadLine();

                            int                  genomeID, folderNumber;
                            String               nextLine;
                            NeatGenome           morphologyCPPNGenome;
                            INetwork             morphologyCPPN;
                            Texture2D            newMorphology;
                            NNControlledCreature newCreature;
                            while ((nextLine = reader.ReadLine()) != null)
                            {
                                // First get the ID of the morphology
                                genomeID = Convert.ToInt32(nextLine);

                                // Then go find the genome corresponding to that ID
                                folderNumber = (genomeID - 1) / numCreaturesPerFolder;

                                // Load the creature's morphology CPPN
                                morphologyCPPNGenome = loadCPPNFromXml(logsFolder + folderNumber.ToString() + "\\" + morphologyXMLprefix + genomeID.ToString() + ".xml");
                                morphologyCPPN       = morphologyCPPNGenome.Decode(ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid"));
                                newMorphology        = generateMorphology(morphologyCPPN);

                                // Create a new creature, which will automatically add it to the components list
                                newCreature = new NNControlledCreature(newMorphology, initialBoardWidth / 2, initialBoardWidth / 2, 0.0f, null, this, drawSensorField, trackPlanting, defaultNumSensors, freezeAfterPlanting, null, morphologyCPPNGenome);
                                newCreature.Genome.fileNumber = genomeID;
                            }
                        }
                    }


                    // Loop through the parent list and find the number of pixels that belong in each bin
                    numNonTransparentPixels = 0;
                    parentQueueGraph        = new GraphNode[numDimsPerRGBAxis, numDimsPerRGBAxis, numDimsPerRGBAxis];

                    if (File.Exists(snapshotNum + "\\ParentList.txt"))
                    {
                        using (System.IO.StreamReader reader = new System.IO.StreamReader(snapshotNum + "\\ParentList.txt")) //parent list contains index into components list
                        {
                            // Skip first line, then read the rest
                            reader.ReadLine();
                            while ((nextLine = reader.ReadLine()) != null)
                            {
                                // Find the decoded individual (index into components list)
                                NNControlledCreature currentCreature = (NNControlledCreature)(Components[Convert.ToInt32(nextLine) - 1]);

                                summedR = 0;
                                summedG = 0;
                                summedB = 0;
                                numNonTransparentPixels = 0;

                                // calculate the RGB ratios
                                foreach (Color pixel in currentCreature.TextureAsColorArray)
                                {
                                    if (pixel.A != 0)
                                    {
                                        numNonTransparentPixels++;

                                        summedR += pixel.R;
                                        summedG += pixel.G;
                                        summedB += pixel.B;
                                    }
                                }

                                // Add individual to one bin based on R, G, B ratios
                                // To get ratios, just sum the R, G, B values divided by number of nontransparent pixels
                                if (parentQueueGraph[summedR / (divider * numNonTransparentPixels), summedG / (divider * numNonTransparentPixels), summedB / (divider * numNonTransparentPixels)] == null)
                                {
                                    parentQueueGraph[summedR / (divider * numNonTransparentPixels), summedG / (divider * numNonTransparentPixels), summedB / (divider * numNonTransparentPixels)] = new GraphNode();
                                    parentQueueGraph[summedR / (divider * numNonTransparentPixels), summedG / (divider * numNonTransparentPixels), summedB / (divider * numNonTransparentPixels)].representativeID = currentCreature.Genome.fileNumber;
                                }
                                parentQueueGraph[summedR / (divider * numNonTransparentPixels), summedG / (divider * numNonTransparentPixels), summedB / (divider * numNonTransparentPixels)].r = summedR / (divider * numNonTransparentPixels);
                                parentQueueGraph[summedR / (divider * numNonTransparentPixels), summedG / (divider * numNonTransparentPixels), summedB / (divider * numNonTransparentPixels)].g = summedG / (divider * numNonTransparentPixels);
                                parentQueueGraph[summedR / (divider * numNonTransparentPixels), summedG / (divider * numNonTransparentPixels), summedB / (divider * numNonTransparentPixels)].b = summedB / (divider * numNonTransparentPixels);
                            }
                        }

                        // After all individuals have been read, append the counts to an external text file
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter(logsFolder + "parent-queue-visualization.txt", true))
                        {
                            // Then, loop through each bin and record the count for each
                            for (int r = 0; r < numDimsPerRGBAxis; r++)
                            {
                                for (int g = 0; g < numDimsPerRGBAxis; g++)
                                {
                                    for (int b = 0; b < numDimsPerRGBAxis; b++)
                                    {
                                        if (parentQueueGraph[r, g, b] == null)
                                        {
                                            file.Write("0,");
                                        }
                                        else
                                        {
                                            file.Write("1,");
                                        }
                                    }
                                }
                            }
                            file.WriteLine();
                        }

                        // Connect all adjacent neighbors in the parent queue graph
                        for (int r = 0; r < numDimsPerRGBAxis; r++)
                        {
                            for (int g = 0; g < numDimsPerRGBAxis; g++)
                            {
                                for (int b = 0; b < numDimsPerRGBAxis; b++)
                                {
                                    if (parentQueueGraph[r, g, b] != null)
                                    {
                                        activatedNodes.Add(parentQueueGraph[r, g, b]);
                                        for (int rmod = -1; rmod < 2; rmod++)
                                        {
                                            for (int gmod = -1; gmod < 2; gmod++)
                                            {
                                                for (int bmod = -1; bmod < 2; bmod++)
                                                {
                                                    if (!(rmod == 0 && bmod == 0 && gmod == 0) && (r + rmod > -1) && (r + rmod < numDimsPerRGBAxis) && (g + gmod > -1) && (g + gmod < numDimsPerRGBAxis) && (b + bmod > -1) && (b + bmod < numDimsPerRGBAxis))
                                                    {
                                                        if (parentQueueGraph[r + rmod, g + gmod, b + bmod] != null)
                                                        {
                                                            parentQueueGraph[r, g, b].connections.Add(parentQueueGraph[r + rmod, g + gmod, b + bmod]);
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }


                        // Now that all nodes have been connected, identify the connected components.
                        // To find all the connected components of a graph, loop through its vertices,
                        // starting a new breadth first or depth first search whenever the loop reaches
                        // a vertex that has not already been included in a previously found connected component.
                        while (activatedNodes.Count > 0)
                        {
                            if (activatedNodes[0].parent == null)
                            {
                                // Create a new disjoint set and add the current parent (it will be the root)
                                disjointSets.Add(new List <GraphNode>());
                                disjointSets[disjointSets.Count - 1].Add(activatedNodes[0]);
                            }
                            else
                            {
                                // Add the node to the correct disjoint set (containing the parent)
                                foreach (List <GraphNode> set in disjointSets)
                                {
                                    if (set.Contains(activatedNodes[0].parent))
                                    {
                                        set.Add(activatedNodes[0]);
                                        break;
                                    }
                                }
                            }

                            // Find all of the current node's children and set the current node as the parent
                            foreach (GraphNode connectedNode in activatedNodes[0].connections)
                            {
                                connectedNode.parent = activatedNodes[0];
                            }

                            activatedNodes.RemoveAt(0);
                        }

                        using (System.IO.StreamWriter file = new System.IO.StreamWriter(logsFolder + "disjointsets.txt", true))
                        {
                            file.WriteLine(disjointSets.Count);
                        }

                        // Print out connected component information
                        using (System.IO.StreamWriter file = new System.IO.StreamWriter(logsFolder + "parent-queue-connected-components.txt", true))
                        {
                            file.WriteLine(disjointSets.Count);
                            foreach (List <GraphNode> set in disjointSets)
                            {
                                for (int i = 0; i < set.Count; i++)
                                {
                                    file.Write(set[i].r + " " + set[i].g + " " + set[i].b + " " + set[i].representativeID + ", ");
                                }
                                file.WriteLine();

                                set.Clear();
                            }
                        }

                        activatedNodes.Clear();
                        disjointSets.Clear();
                    }
                    else
                    {
                        throw new Exception(logsFolder + "ParentList.txt not found.");
                    }


                    // Finally, reset the components list and free the textures for each individual
                    for (int i = 0; i < Components.Count; i++)
                    {
                        ((Creature)(Components[i])).Texture.Dispose();
                    }
                    Components.Clear();
                }

                numPlanters = 0;
                if (File.Exists(snapshotNum + "\\RunInfo.txt"))
                {
                    using (System.IO.StreamReader reader = new System.IO.StreamReader(snapshotNum + "\\RunInfo.txt"))
                    {
                        // Skip first line, then read the rest
                        while ((nextLine = reader.ReadLine()) != null)
                        {
                            if (nextLine.Contains("planted successfully"))
                            {
                                numPlanters++;
                            }
                        }
                    }
                }

                using (System.IO.StreamWriter file = new System.IO.StreamWriter(logsFolder + "planters.txt", true))
                {
                    file.WriteLine(numPlanters);
                }
            }
        }
Пример #23
0
        /// <summary>
        /// Converts a decoded <c>ConcurrentNetwork</c> into a <c>NetworkModel</c>
        /// (a structural mirror of the network's neurons and connections).
        /// </summary>
        /// <param name="network">The network to convert.</param>
        /// <returns>A new NetworkModel containing one model neuron per network neuron,
        /// with all connections attached to their source/target model neurons.</returns>
        public static NetworkModel DecodeToNetworkModel(ConcurrentNetwork network)
        {
            ModelNeuronList masterNeuronList = new ModelNeuronList();

            // Loop all neurons and build a table keyed on id. A generic dictionary
            // (fully qualified in case System.Collections.Generic is not imported at
            // file level) avoids the boxing/casting of the previous Hashtable.
            var neuronTable = new System.Collections.Generic.Dictionary<uint, ModelNeuron>(network.MasterNeuronList.Count);

            foreach (Neuron neuron in network.MasterNeuronList)
            {
                // NOTE(review): every model neuron gets the "NullFn" activation regardless
                // of the source neuron's function — presumably the model is structural only.
                ModelNeuron modelNeuron = new ModelNeuron(neuron.NeuronType, neuron.Id, ActivationFunctionFactory.GetActivationFunction("NullFn"));
                neuronTable.Add(modelNeuron.Id, modelNeuron);
                masterNeuronList.Add(modelNeuron);
            }

            // Loop through all of the connections (stored within the neurons).
            // Now that the neuron table is keyed on id we can attach each connection
            // to its source and target model neurons.
            foreach (Neuron neuron in network.MasterNeuronList)
            {
                foreach (Connection connection in neuron.ConnectionList)
                {
                    ModelConnection modelConnection = new ModelConnection();
                    modelConnection.Weight       = connection.Weight;
                    modelConnection.SourceNeuron = neuronTable[connection.SourceNeuronId];
                    modelConnection.TargetNeuron = neuronTable[connection.TargetNeuronId];

                    modelConnection.SourceNeuron.OutConnectionList.Add(modelConnection);
                    modelConnection.TargetNeuron.InConnectionList.Add(modelConnection);
                }
            }

            return new NetworkModel(masterNeuronList);
        }
Пример #24
0
        /// <summary>
        /// Deserializes a <c>ConcurrentNetwork</c> from its XML representation.
        /// Expects an 'activation-fn-id' attribute on the network element, a
        /// 'neurons/neuron' node list, and a 'connections/connection' node list.
        /// </summary>
        /// <param name="xmlNetwork">The XML element describing the network.</param>
        /// <returns>The reconstructed network, with neurons ordered bias|input|hidden|output.</returns>
        /// <exception cref="SharpNeatLib.Xml.XmlException">
        /// Thrown if the XML does not contain exactly one bias neuron.</exception>
        private static ConcurrentNetwork ReadNetwork(XmlElement xmlNetwork)
        {
            //--- Read the activation function id.
            // NOTE(review): activationFn itself is not referenced below (each neuron's own
            // function is presumably resolved inside ReadNeuron), but the factory lookup
            // validates the id — kept for that side effect; confirm before removing.
            string activationFnId            = XmlUtilities.GetAttributeValue(xmlNetwork, "activation-fn-id");
            IActivationFunction activationFn = ActivationFunctionFactory.GetActivationFunction(activationFnId);

            // Read the neurons into per-type lists and into a table keyed on id.
            // A generic dictionary (fully qualified in case System.Collections.Generic is
            // not imported at file level) avoids the casting of the previous Hashtable.
            var neuronTable = new System.Collections.Generic.Dictionary<uint, Neuron>();

            NeuronList biasNeuronList   = new NeuronList();
            NeuronList inputNeuronList  = new NeuronList();
            NeuronList hiddenNeuronList = new NeuronList();
            NeuronList outputNeuronList = new NeuronList();
            NeuronList masterNeuronList = new NeuronList();

            XmlNodeList listNeurons = xmlNetwork.SelectNodes("neurons/neuron");

            foreach (XmlElement xmlNeuron in listNeurons)
            {
                Neuron neuron = ReadNeuron(xmlNeuron);
                neuronTable.Add(neuron.Id, neuron);

                switch (neuron.NeuronType)
                {
                case NeuronType.Bias:
                    biasNeuronList.Add(neuron);
                    break;

                case NeuronType.Input:
                    inputNeuronList.Add(neuron);
                    break;

                case NeuronType.Hidden:
                    hiddenNeuronList.Add(neuron);
                    break;

                case NeuronType.Output:
                    outputNeuronList.Add(neuron);
                    break;
                }
            }

            //----- Build a master list of neurons. Neurons must be ordered by type - bias,input,hidden,output.
            if (biasNeuronList.Count != 1)
            {
                throw new SharpNeatLib.Xml.XmlException("Neural Network XML must contain exactly 1 bias node.");
            }

            // Append the per-type lists in the required order.
            foreach (NeuronList typedList in new[] { biasNeuronList, inputNeuronList, hiddenNeuronList, outputNeuronList })
            {
                foreach (Neuron neuron in typedList)
                {
                    masterNeuronList.Add(neuron);
                }
            }

            //----- Read Connections and store against target neurons.
            XmlNodeList listConnections = xmlNetwork.SelectNodes("connections/connection");

            foreach (XmlElement xmlConnection in listConnections)
            {
                Connection connection = ReadConnection(xmlConnection);

                // Store the connection with its target neuron.
                neuronTable[connection.TargetNeuronId].ConnectionList.Add(connection);

                // Bind the connection to its source neuron.
                connection.SetSourceNeuron(neuronTable[connection.SourceNeuronId]);
            }

            return new ConcurrentNetwork(masterNeuronList);
        }
Пример #25
0
        /// <summary>
        /// Sets up the replay scene: loads the background image, then reconstructs the
        /// replayed individual's morphology from its saved CPPN XML and places a demo
        /// creature at the centre of the board.
        /// </summary>
        protected override void Initialize()
        {
            base.Initialize();

            // The background is always read from the first snapshot folder ("0").
            string backgroundPath = logsFolder + "0\\background.dat";
            backgroundImage = new StaticImage("Background", 0, 0, ReadBackgroundFromFile(backgroundPath), this);

            // Create the world / region system.
            // Note: the morphology must be generated in advance of the Load.
            // Individuals are stored in numbered folders, numCreaturesPerFolder per folder.
            int folder = (Simulator.replayIndividualNumber - 1) / numCreaturesPerFolder;
            initialMorphologyFilename = logsFolder + folder.ToString() + "\\" + morphologyXMLprefix + Simulator.replayIndividualNumber.ToString() + ".xml";

            // Decode the saved genome into a CPPN and grow the creature's morphology from it.
            INetwork cppn = loadCPPNFromXml(initialMorphologyFilename).Decode(ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid"));
            morphology = generateMorphology(cppn);

            // NOTE(review): the local is never read again — the StaticImage constructor
            // presumably registers the sprite with this game instance; confirm before removing.
            StaticImage demoCreature = new StaticImage("Creature", initialBoardWidth / 2, initialBoardHeight / 2, morphology, this);
        }
        /// <summary>
        /// Decodes a CPPN into a substrate-stack NeatGenome for a (possibly heterogeneous)
        /// team of agents, with a "situation" signal fed to the CPPN as an extra input.
        /// Each agent gets its own copy of the substrate (input/output/hidden neurons),
        /// and the CPPN is queried per source/target neuron pair to produce weights and
        /// per-target biases.
        /// </summary>
        /// <param name="network">The decoded CPPN used to generate weights/biases.</param>
        /// <param name="stackCoordinates">One Z-coordinate per agent in the stack.</param>
        /// <param name="normalizeWeights">If true, normalizes the generated connection weights.</param>
        /// <param name="adaptiveNetwork">If true, reads Hebbian parameters A,B,C,D and a
        /// learning rate from CPPN outputs 2..6.</param>
        /// <param name="modulatoryNet">If true, reads a modulatory-connection signal from
        /// CPPN output 7.</param>
        /// <param name="signal">The situation signal placed in the CPPN input vector
        /// (index 4 for a single agent, index 5 when a Z-coordinate is also present).</param>
        /// <returns>The generated genome, flagged adaptable/modulatory as requested.</returns>
        public NeatGenome.NeatGenome generateGenomeStackSituationalPolicy(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet, float signal)
        {
            // Schrum: For debugging
            //Console.WriteLine("generateGenomeStackSituationalPolicy:signal=" + signal);
            //Console.WriteLine("CPPN inputs = " + network.InputNeuronCount);

            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            // Presized for a fully-connected input->hidden->output substrate per agent.
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            // Schrum: Too many inputs: Only store those that are needed
            //float[] coordinates = new float[5 + 1]; // <-- Schrum: bit sloppy: frequently results in unused CPPN inputs. Should make more precise
            float[] coordinates = new float[network.InputNeuronCount]; // Schrum: CPPN tracks how many inputs it needs
            float   output;
            uint    connectionCounter = 0;
            // NOTE(review): agentDelta and iterations are computed but never used below
            // (RecursiveActivation is used instead of MultipleSteps) — confirm before removing.
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            // Tracks which target neurons have already had their bias queried from the
            // CPPN, so each bias is computed exactly once. Indexed by substrate neuron id.
            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            // Schrum: If we are inside this function, then we either have a heterogeneous team
            //         of a single agent (not sure why that ended up being the case; odd use of homogeneousTeam).
            //         Therefore, numberOfAgents tells us whether we need to save space for a Z-coordinate,
            //         and whether we are expecting a Situation input.
            if (numberOfAgents == 1 && coordinates.Length > 4)
            {
                coordinates[4] = signal; // No Z coord, but save situation
            }
            else if (coordinates.Length > 5)
            {
                coordinates[5] = signal; // Both Z coord and situation
            }
            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            // NOTE(review): no bias neurons are created here despite the comment above —
            // target-neuron biases are instead stored on the NeuronGene.Bias field below.
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes (ids 0 .. totalInputCount-1)
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes (ids follow the inputs)
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes (ids follow the outputs)
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            // One pass per agent; stackCoordinate is that agent's Z position in the stack.
            foreach (float stackCoordinate in stackCoordinates)
            {
                // Schrum: Only include Z-coord as input if there are multiple team members
                if (numberOfAgents > 1)
                {
                    coordinates[4] = stackCoordinate; // Schrum: z-coord will always be at index 4
                }
                // Schrum: Debug
                //Console.WriteLine("CPPN inputs (first 4 blank): " + string.Join(",", coordinates));


                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                // Walk every connected pair of neuron groups in the substrate description.
                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;


                        foreach (PointF source in ng.NeuronPositions)
                        {
                            //----------------------------

                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                // Map (group type, agent, position index) to a substrate neuron id.
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;

                                //Input
                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;

                                //Output
                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break;      //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //--- bias
                                //-----------------Get the bias of the target node, once per target:
                                // query the CPPN with a zeroed source position; output 1 is the bias.
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }
                                //--bias



                                // Query the CPPN with the (source, target) positions; output 0 is
                                // the connection weight.
                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                // Schrum: Debug
                                //Console.WriteLine("CPPN inputs: " + string.Join(",", coordinates));

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                // Schrum: Observation: It seems impossible to use both LEO and adaptive networks because of these hardcoded magic numbers
                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                // Schrum: Observation: In long run, might be desirable to use LEO, but incompatible with special preference neuron output
                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                // Schrum: This is a horrible hack, but it gets the job done for now.
                                // The reason this works is that it makes the following assumptions that could easily be broken in the future:
                                // 1) It is assumed that the only reason a CPPN would have 3 outputs per policy is if the third is for preference links
                                // 2) It is assumed that in a substrate with a preference neuron, the y-coord will always be 0.8, and no other neuron will have
                                //    that y-coord.
                                //Console.WriteLine("output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                //Console.WriteLine("network.OutputsPerPolicy == 3" + (network.OutputsPerPolicy == 3));
                                //Console.WriteLine("target.Y == 0.8" + (target.Y == 0.8f));
                                if (network.OutputsPerPolicy == 3 && target.Y == 0.8f)
                                {
                                    // The output from the link for the preference neuron replaces the standard output.
                                    // Because the link weight is defined by a totally different CPPN output, the preference
                                    // neuron is more free to behave very differently.
                                    output = network.GetOutputSignal(2);
                                    //Console.WriteLine("Preference output:" + coordinates[0] + "," + coordinates[1] + ":" + coordinates[2] + "," + coordinates[3]);
                                }

                                // Only express the connection if it clears the weight threshold
                                // (and, when LEO is enabled, if the LEO output is positive).
                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        // Rescale |output| from (threshold, 1] to (0, weightRange], keeping sign.
                                        float weight =
                                            (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) *
                                                    weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive networkset weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new
                                                        ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref
                                                                       coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: Debugging
            // Looking at the control networks has revealed that the order of details in the substrate
            // description is important. The layer with the preference neuron has to be defined last
            // if it is to be the final neuron in the linearly organized output layer.
            //XmlDocument doc = new XmlDocument();
            //SharpNeatLib.NeatGenome.Xml.XmlGenomeWriterStatic.Write(doc, sng);
            //System.IO.FileInfo oFileInfo = new System.IO.FileInfo("temp.xml");
            //doc.Save(oFileInfo.FullName);

            return(sng);
        }
        public NeatGenome.NeatGenome generateMultiGenomeStack(INetwork network, List <float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));

            float[] coordinates = new float[5];
            float   output;
            uint    connectionCounter = 0;
            float   agentDelta        = 2.0f / (numberOfAgents - 1);
            int     iterations        = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount  = InputCount * numberOfAgents;
            uint totalHiddenCount = HiddenCount * numberOfAgents;

            // Schrum: debugging

            /*
             * Console.WriteLine("generateMultiGenomeStack");
             * Console.WriteLine("numberOfAgents:" + numberOfAgents);
             * Console.WriteLine("totalOutputCount:" + totalOutputCount);
             * Console.WriteLine("totalInputCount:" + totalInputCount);
             */

            uint   sourceCount, targetCout;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents + OutputCount * numberOfAgents, NeuronType.Hidden, activationFunction));
            }

            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];

            uint  agent = 0;
            float A = 0.0f, B = 0.0f, C = 0.0f, D = 0.0f, learningRate = 0.0f, modConnection;

            foreach (float stackCoordinate in stackCoordinates)
            {
                coordinates[4] = stackCoordinate;
                uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
                NeuronGroup connectedNG;

                foreach (NeuronGroup ng in neuronGroups)
                {
                    foreach (uint connectedTo in ng.ConnectedTo)
                    {
                        connectedNG = getNeuronGroup(connectedTo);

                        sourceCount = 0;
                        foreach (PointF source in ng.NeuronPositions)
                        {
                            targetCout = 0;
                            foreach (PointF target in connectedNG.NeuronPositions)
                            {
                                switch (ng.GroupType)
                                {
                                case 0: sourceID = (agent * InputCount) + ng.GlobalID + sourceCount; break;                                       //Input

                                case 1: sourceID = totalInputCount + (agent * OutputCount) + ng.GlobalID + sourceCount; break;                    //Output

                                case 2: sourceID = totalInputCount + totalOutputCount + (agent * HiddenCount) + ng.GlobalID + sourceCount; break; //Hidden
                                }

                                switch (connectedNG.GroupType)
                                {
                                case 0: targetID = (agent * InputCount) + connectedNG.GlobalID + targetCout; break;

                                case 1: targetID = totalInputCount + (agent * OutputCount) + connectedNG.GlobalID + targetCout; break;

                                case 2: targetID = totalInputCount + totalOutputCount + (agent * HiddenCount) + connectedNG.GlobalID + targetCout; break;
                                }

                                //target node bias
                                if (!biasCalculated[targetID])
                                {
                                    coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                    network.ClearSignals();
                                    network.SetInputSignals(coordinates);
                                    ((ModularNetwork)network).RecursiveActivation();
                                    neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                    biasCalculated[targetID]    = true;
                                }

                                coordinates[0] = source.X;
                                coordinates[1] = source.Y;
                                coordinates[2] = target.X;
                                coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                //network.MultipleSteps(iterations);
                                output = network.GetOutputSignal(0);

                                double leo = 0.0;

                                if (adaptiveNetwork)
                                {
                                    A            = network.GetOutputSignal(2);
                                    B            = network.GetOutputSignal(3);
                                    C            = network.GetOutputSignal(4);
                                    D            = network.GetOutputSignal(5);
                                    learningRate = network.GetOutputSignal(6);
                                }

                                if (modulatoryNet)
                                {
                                    modConnection = network.GetOutputSignal(7);
                                }
                                else
                                {
                                    modConnection = 0.0f;
                                }

                                if (useLeo)
                                {
                                    threshold = 0.0;
                                    leo       = network.GetOutputSignal(2);
                                }

                                if (!useLeo || leo > 0.0)
                                {
                                    if (Math.Abs(output) > threshold)
                                    {
                                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                        //if (adaptiveNetwork)
                                        //{
                                        //    //If adaptive network set weight to small value
                                        //    weight = 0.1f;
                                        //}
                                        connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, A, B, C, D, modConnection, learningRate));
                                    }
                                }
                                //else
                                //{
                                //    Console.WriteLine("Not connected");
                                //}
                                targetCout++;
                            }
                            sourceCount++;
                        }
                    }
                }
                agent++;
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable  = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;

            // Schrum: debugging
            //Console.WriteLine("sng.InputNeuronCount:" + sng.InputNeuronCount);
            //Console.WriteLine("sng.OutputNeuronCount:" + sng.OutputNeuronCount);

            return(sng);
        }
        /// <summary>
        /// Builds a single (homogeneous) substrate genome by querying the CPPN once per
        /// candidate connection: for every neuron-group pair listed in <c>ConnectedTo</c>,
        /// connection weights are taken from CPPN output 0 and each target neuron's bias
        /// from CPPN output 1.
        /// </summary>
        /// <param name="network">The decoded CPPN used to query weights and biases.</param>
        /// <param name="normalizeWeights">If true, normalize connection weights after generation.</param>
        /// <param name="adaptiveNetwork">Copied onto the genome's <c>networkAdaptable</c> flag.</param>
        /// <param name="modulatoryNet">Copied onto the genome's <c>networkModulatory</c> flag.</param>
        /// <returns>The generated substrate genome.</returns>
        private NeatGenome.NeatGenome generateHomogeneousGenome(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList((int)((InputCount * HiddenCount) + (HiddenCount * OutputCount)));

            // CPPN input vector: (sourceX, sourceY, targetX, targetY).
            float[] coordinates = new float[4];
            float   output;
            uint    connectionCounter = 0;

            uint totalOutputCount = OutputCount;
            uint totalInputCount  = InputCount;
            uint totalHiddenCount = HiddenCount;

            uint   sourceCount, targetCount;
            double weightRange = HyperNEATParameters.weightRange;
            double threshold   = HyperNEATParameters.threshold;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes (inputs pass their value through unchanged)
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < totalHiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            // Each target neuron's bias is queried from the CPPN at most once.
            bool[] biasCalculated = new bool[totalHiddenCount + totalOutputCount + totalInputCount];


            uint        sourceID = uint.MaxValue, targetID = uint.MaxValue;
            NeuronGroup connectedNG;

            foreach (NeuronGroup ng in neuronGroups)
            {
                foreach (uint connectedTo in ng.ConnectedTo)
                {
                    connectedNG = getNeuronGroup(connectedTo);

                    sourceCount = 0;
                    foreach (PointF source in ng.NeuronPositions)
                    {
                        targetCount = 0;
                        foreach (PointF target in connectedNG.NeuronPositions)
                        {
                            // Map (group type, group id, index within group) to the genome-wide
                            // neuron id, honoring the input|output|hidden ordering above.
                            switch (ng.GroupType)
                            {
                            case 0: sourceID = ng.GlobalID + sourceCount; break;                                      //Input

                            case 1: sourceID = totalInputCount + ng.GlobalID + sourceCount; break;                    //Output

                            case 2: sourceID = totalInputCount + totalOutputCount + ng.GlobalID + sourceCount; break; //Hidden
                            }

                            switch (connectedNG.GroupType)
                            {
                            case 0: targetID = connectedNG.GlobalID + targetCount; break;

                            case 1: targetID = totalInputCount + connectedNG.GlobalID + targetCount; break;

                            case 2: targetID = totalInputCount + totalOutputCount + connectedNG.GlobalID + targetCount; break;
                            }

                            // Calculate the bias of the target node (CPPN output 1), querying
                            // with the source position zeroed out.
                            if (!biasCalculated[targetID])
                            {
                                coordinates[0] = 0.0f; coordinates[1] = 0.0f; coordinates[2] = target.X; coordinates[3] = target.Y;

                                network.ClearSignals();
                                network.SetInputSignals(coordinates);
                                ((ModularNetwork)network).RecursiveActivation();
                                neurons[(int)targetID].Bias = (float)(network.GetOutputSignal(1) * weightRange);
                                biasCalculated[targetID]    = true;
                            }

                            // Query the CPPN for this source/target pair's connection weight.
                            coordinates[0] = source.X;
                            coordinates[1] = source.Y;
                            coordinates[2] = target.X;
                            coordinates[3] = target.Y;

                            network.ClearSignals();
                            network.SetInputSignals(coordinates);
                            ((ModularNetwork)network).RecursiveActivation();
                            output = network.GetOutputSignal(0);

                            // Express the connection only when the CPPN magnitude exceeds the
                            // threshold; rescale the excess into [-weightRange, weightRange].
                            if (Math.Abs(output) > threshold)
                            {
                                float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                                connections.Add(new ConnectionGene(connectionCounter++, sourceID, targetID, weight, ref coordinates, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f));
                            }
                            targetCount++;
                        }
                        sourceCount++;
                    }
                }
            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
            return(gn);
        }
Пример #29
0
        /// <summary>
        /// Create a default minimal genome that describes a NN with the given number of inputs and outputs.
        /// </summary>
        /// <returns></returns>
        public static IGenome CreateGenome(NeatParameters neatParameters, IdGenerator idGenerator, int inputNeuronCount, int outputNeuronCount, float connectionProportion, bool neatBrain)
        {
            // IMPORTANT NOTE: All neurons must be created before any connections. That way every
            // genome in the initial population obtains identical innovation IDs for the
            // bias, input and output nodes.
            NeuronGeneList     inputGenes      = new NeuronGeneList(); // includes bias neuron.
            NeuronGeneList     outputGenes     = new NeuronGeneList();
            NeuronGeneList     allNeuronGenes  = new NeuronGeneList();
            ConnectionGeneList connectionGenes = new ConnectionGeneList();

            // Single bias neuron first; bias and input nodes pass their value through.
            //TODO: DAVID proper activation function change to NULL?
            IActivationFunction passThroughFn = ActivationFunctionFactory.GetActivationFunction("NullFn");
            NeuronGene          gene          = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Bias, passThroughFn, 0f);

            inputGenes.Add(gene);
            allNeuronGenes.Add(gene);

            // Input neuron genes.
            for (int i = 0; i < inputNeuronCount; i++)
            {
                //TODO: DAVID proper activation function change to NULL?
                gene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Input, passThroughFn, 0f);
                inputGenes.Add(gene);
                allNeuronGenes.Add(gene);
            }

            // Output neuron genes; the activation function depends on the brain type requested.
            IActivationFunction outputFn = neatBrain
                ? ActivationFunctionFactory.GetActivationFunction("SteepenedSigmoidApproximation")
                : ActivationFunctionFactory.GetActivationFunction("BipolarSigmoid");

            for (int i = 0; i < outputNeuronCount; i++)
            {
                //TODO: DAVID proper activation function
                gene = new NeuronGene(idGenerator.NextInnovationId, NeuronType.Output, outputFn, 1f);
                outputGenes.Add(gene);
                allNeuronGenes.Add(gene);
            }

            // Walk every possible input->output pair and create a connection for roughly
            // connectionProportion of them.
            foreach (NeuronGene targetGene in outputGenes)
            {
                foreach (NeuronGene sourceGene in inputGenes)
                {
                    // Always consume an ID even when the connection is skipped. This guarantees
                    // that the same neuron pair maps to the same connection ID throughout the
                    // generated population.
                    uint connectionInnovationId = idGenerator.NextInnovationId;

                    if (Utilities.NextDouble() < connectionProportion)
                    {
                        // Weight drawn uniformly from +-(connectionWeightRange / 2).
                        double weight = (Utilities.NextDouble() * neatParameters.connectionWeightRange) - neatParameters.connectionWeightRange / 2.0;
                        connectionGenes.Add(new ConnectionGene(connectionInnovationId,
                                                               sourceGene.InnovationId,
                                                               targetGene.InnovationId,
                                                               weight));
                    }
                }
            }

            // Don't create any hidden nodes at this point. Fundamental to the NEAT way is to start minimally!
            return(new NeatGenome(idGenerator.NextGenomeId, allNeuronGenes, connectionGenes, inputNeuronCount, outputNeuronCount));
        }
        /// <summary>
        /// Builds a homogeneous genome on an evolvable substrate (ES-HyperNEAT): hidden
        /// neuron positions and connections are discovered from the CPPN by
        /// <c>EvolvableSubstrate</c> rather than taken from fixed neuron groups. Dangling
        /// connections (ones not on a path carrying flow between inputs and outputs) are
        /// pruned before the genome is created.
        /// </summary>
        /// <param name="network">The decoded CPPN used to discover the substrate.</param>
        /// <param name="normalizeWeights">If true, normalize connection weights after pruning.</param>
        /// <param name="adaptiveNetwork">Copied onto the genome's <c>networkAdaptable</c> flag.</param>
        /// <param name="modulatoryNet">Copied onto the genome's <c>networkModulatory</c> flag.</param>
        /// <returns>The generated substrate genome.</returns>
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            List <PointF> hiddenNeuronPositions = new List <PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList  connections        = new ConnectionGeneList();

            List <PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List <PointF> inputNeuronPositions  = getNeuronGroupByType(0);

            // Let the evolvable substrate discover hidden nodes and connections from the CPPN
            // over the [-1,1] x [-1,1] region.
            EvolvableSubstrate se = new EvolvableSubstrate();

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                                   HyperNEATParameters.initialRes,
                                   (float)HyperNEATParameters.varianceThreshold,
                                   (float)HyperNEATParameters.bandingThreshold,
                                   (int)HyperNEATParameters.ESIterations,
                                   (float)HyperNEATParameters.divisionThreshold,
                                   HyperNEATParameters.maximumRes,
                                   InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            NeuronGeneList neurons;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden
            neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes (inputs pass their value through unchanged)
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }

            // Iteratively prune dangling connections: a connection is kept only while its
            // source receives flow (has an incoming connection or is an input) and its
            // target passes flow on (has an outgoing connection or is an output). Removing
            // a connection can expose new dangling ones, so repeat until a pass finds none.
            bool danglingConnection = true;

            while (danglingConnection)
            {
                bool[] hasIncoming = new bool[neurons.Count];

                foreach (ConnectionGene co in connections)
                {
                    hasIncoming[co.TargetNeuronId] = true;
                }
                // Input neurons are sources of flow by definition.
                for (int i = 0; i < InputCount; i++)
                {
                    hasIncoming[i] = true;
                }

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    // Neurons that only connect to themselves don't count as having outgoing flow.
                    if (co.TargetNeuronId != co.SourceNeuronId)
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                }

                // Output neurons are sinks of flow by definition.
                for (int i = 0; i < OutputCount; i++)
                {
                    hasOutgoing[i + InputCount] = true;
                }


                danglingConnection = false;
                // Check whether any dangling connections remain after this pass.
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncoming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return(!hasIncoming[m.SourceNeuronId]); });
                connections.RemoveAll(delegate(ConnectionGene m) { return(!hasOutgoing[m.TargetNeuronId]); });
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));

            gn.networkAdaptable  = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;

            return(gn);
        }