Пример #1
0
 /// <summary>
 /// Records the supplied network as the current best and, when a nested
 /// view is attached, forwards the refresh request to it.
 /// </summary>
 /// <param name="network">The network to display.</param>
 public override void RefreshView(SharpNeatLib.NeuralNetwork.INetwork network)
 {
     currentBest = network;

     // Only propagate when an inner view actually exists.
     nw?.RefreshView(network);

     viewing = true;
 }
Пример #2
0
 /// <summary>
 /// Generates a substrate genome from the given CPPN, choosing the
 /// concentric (circle) layout or the grid pattern layout based on the
 /// <c>concentric</c> flag.  A CPPN with exactly five inputs selects the
 /// alternate variant of either generator (presumably an extra coordinate
 /// input — TODO confirm against the generator implementations).
 /// </summary>
 /// <param name="CPPN">The compositional pattern-producing network to query.</param>
 /// <returns>The generated genome.</returns>
 public override SharpNeatLib.NeatGenome.NeatGenome generateGenome(SharpNeatLib.NeuralNetwork.INetwork CPPN)
 {
     // Both branches previously duplicated the same ternary
     // (cond ? f(CPPN, true) : f(CPPN, false)); fold the condition into
     // a single flag and pass it directly.
     bool fiveInputs = CPPN.InputNeuronCount == 5;

     return concentric
         ? generatePerceptronCircle(CPPN, fiveInputs)
         : generatePerceptronPattern(CPPN, fiveInputs);
 }
Пример #3
0
 /// <summary>
 /// Caches the network's connection and neuron-signal arrays for painting,
 /// stores the current resolution, and triggers a repaint of the control.
 /// </summary>
 /// <param name="network">Either an already-decoded substrate network, or a
 /// single-output CPPN that is first expanded through the substrate.</param>
 public void RefreshView(SharpNeatLib.NeuralNetwork.INetwork network)
 {
     // A single output neuron means we were handed the CPPN itself, so
     // decode it into the substrate network first.
     // NOTE(review): the original branched on FoodGatherParams.circle but
     // executed the identical call in both branches; the branch has been
     // removed.  Confirm whether a circle-specific substrate was intended.
     if (network.OutputNeuronCount == 1)
     {
         network = FoodGathererNetworkEvaluator.substrate.generateNetwork(network);
     }

     connections = ((FloatFastConcurrentNetwork)network).connectionArray;
     neurons     = ((FloatFastConcurrentNetwork)network).neuronSignalArray;
     resolution  = FoodGatherParams.resolution;
     this.Refresh();
 }
Пример #4
0
        /// <summary>
        /// Builds a layered-substrate genome by querying the CPPN for every
        /// source/target neuron pair.  The substrate is a stack of m_numLayers
        /// grids of m_rows x m_cols neurons whose coordinates are mapped into
        /// [-1, 1].  For each layer transition li, the CPPN's output selects a
        /// connection weight; when m_useBias is set, even outputs (li*2) carry
        /// the connection weight and odd outputs (li*2 + 1) carry a bias weight.
        /// Outputs whose magnitude is below HyperNEATParameters.threshold yield
        /// no connection; the rest are rescaled into [-weightRange, weightRange].
        /// </summary>
        /// <param name="cppnNetwork">CPPN queried with 4D input
        /// (srcX, srcY, dstX, dstY).</param>
        /// <returns>A genome over m_neurons with the generated connections.</returns>
        public virtual NeatGenome GenerateGenome(SharpNeatLib.NeuralNetwork.INetwork cppnNetwork)
        {
            ConnectionGeneList connections = new ConnectionGeneList();

            // Relaxation bound: twice the CPPN's hidden-neuron count plus one.
            int maxIterations = 2 * (cppnNetwork.TotalNeuronCount - (cppnNetwork.InputNeuronCount + cppnNetwork.OutputNeuronCount)) + 1;

            // TODO: hard cap of 4 iterations — presumably a speed tweak; verify
            // recurrent CPPNs can still settle within it.
            maxIterations = Math.Min(maxIterations, 4);

            // TODO: epsilon 0.0 means RelaxNetwork only stops on the iteration cap.
            double epsilon     = 0.0;
            double threshold   = HyperNEATParameters.threshold;
            double weightRange = HyperNEATParameters.weightRange;

            // Neuron-id layout: [optional bias][inputs][outputs][hidden layers...].
            uint biasid           = 0u;
            uint inputsStart      = biasid + (m_useBias ? 1u : 0u);
            uint inputsEnd        = (uint)(inputsStart + (m_rows * m_cols));
            uint outputsStart     = inputsEnd;
            uint outputsEnd       = (uint)(outputsStart + (m_rows * m_cols));
            uint firstHiddenStart = outputsEnd;

            // CPPN input buffer: (srcX, srcY, dstX, dstY).
            float[] coordinates = new float[4];
            float   output;
            uint    connectionCounter = 0u;

            if (m_useBias)
            {
                // we use the bias neuron on the center, and use the
                // 2nd output of the CPPN to compute its weight
                coordinates[0] = 0.0f;
                coordinates[1] = 0.0f;

                // add the bias link to all neurons of the next layer
                for (int ni = 0, ncount = m_rows * m_cols; ni < ncount; ni++)
                {
                    // Grid position of the target neuron within its layer.
                    int row = (ni / m_cols);
                    int col = (ni % m_cols);

                    // Map grid cell centres into [-1, 1].
                    coordinates[2] = (-1.0f) + (m_colDelta * (col + 0.5f));
                    coordinates[3] = (-1.0f) + (m_rowDelta * (row + 0.5f));

                    cppnNetwork.ClearSignals();
                    cppnNetwork.SetInputSignals(coordinates);
                    cppnNetwork.RelaxNetwork(maxIterations, epsilon);

                    // One bias link per layer transition; the odd CPPN output
                    // of each transition supplies the bias weight.
                    for (int li = 0; li < m_numLayers - 1; li++)
                    {
                        output = cppnNetwork.GetOutputSignal(li * 2 + 1);
                        uint lstart = (uint)(firstHiddenStart + li * (m_cols * m_rows));
                        if (li == m_numLayers - 2)
                        {
                            // Last transition targets the output layer ids.
                            lstart = outputsStart;
                        }

                        if (Math.Abs(output) > threshold)
                        {
                            // Rescale |output| from (threshold, 1] into
                            // (0, weightRange], preserving the sign.
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++,
                                                               biasid, (uint)(lstart + ni), weight));
                        }
                    }
                }
            }

            // now add the connections
            for (int inpi = 0, inpcount = m_rows * m_cols; inpi < inpcount; inpi++)
            {
                for (int outi = 0, outcount = m_rows * m_cols; outi < outcount; outi++)
                {
                    // Grid positions of source (inpi) and target (outi).
                    int inrow = (inpi / m_cols); int incol = (inpi % m_cols);
                    int outrow = (outi / m_cols); int outcol = (outi % m_cols);

                    // Map both cells' centres into [-1, 1].
                    coordinates[0] = (-1.0f) + (m_colDelta * (incol + 0.5f));
                    coordinates[1] = (-1.0f) + (m_rowDelta * (inrow + 0.5f));
                    coordinates[2] = (-1.0f) + (m_colDelta * (outcol + 0.5f));
                    coordinates[3] = (-1.0f) + (m_rowDelta * (outrow + 0.5f));

                    cppnNetwork.ClearSignals();
                    cppnNetwork.SetInputSignals(coordinates);
                    cppnNetwork.RelaxNetwork(maxIterations, epsilon);

                    // The same 4D query drives every layer transition; with a
                    // bias the even outputs carry the connection weights.
                    for (int li = 0; li < m_numLayers - 1; li++)
                    {
                        output = cppnNetwork.GetOutputSignal(m_useBias ? li * 2 : li);

                        uint dststart = (uint)(firstHiddenStart + li * (m_cols * m_rows));
                        if (li == m_numLayers - 2)
                        {
                            // Last transition lands on the output layer.
                            dststart = outputsStart;
                        }

                        uint srcstart = (uint)(firstHiddenStart + (li - 1) * (m_cols * m_rows));
                        if (li == 0)
                        {
                            // First transition starts from the input layer.
                            srcstart = inputsStart;
                        }

                        if (Math.Abs(output) > threshold)
                        {
                            // Rescale above-threshold magnitude into the
                            // weight range, preserving the sign.
                            float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                            connections.Add(new ConnectionGene(connectionCounter++,
                                                               (uint)(srcstart + inpi), (uint)(dststart + outi), weight));
                        }
                    }
                }
            }


            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_rows * m_cols, m_rows * m_cols));
        }
Пример #5
0
        /// <summary>
        /// Builds a single-layer substrate genome from the CPPN: every
        /// input-grid neuron is a candidate source for every output-grid neuron
        /// (CPPN output 0), and a dedicated "ball owner" neuron, queried at the
        /// substrate centre, is a candidate source for every output neuron
        /// (CPPN output 1).  CPPN outputs whose magnitude is below
        /// HyperNEATParameters.threshold produce no connection; the rest are
        /// rescaled into [-weightRange, weightRange] with their sign preserved.
        /// </summary>
        /// <param name="network">The CPPN, queried with 4D input
        /// (srcX, srcY, dstX, dstY) mapped into [-1, 1].</param>
        /// <returns>The generated genome over m_neurons.</returns>
        public SharpNeatLib.NeatGenome.NeatGenome GenerateGenome(SharpNeatLib.NeuralNetwork.INetwork network)
        {
            // Relaxation bound: twice the CPPN's hidden-neuron count plus one;
            // epsilon 0.0 means RelaxNetwork only stops on the iteration cap.
            int    maxIterations = 2 * (network.TotalNeuronCount - (network.InputNeuronCount + network.OutputNeuronCount)) + 1;
            double epsilon       = 0.0;
            double threshold     = HyperNEATParameters.threshold;
            double weightRange   = HyperNEATParameters.weightRange;

            // Neuron-id layout: [optional bias (id 0)][inputs][ball-owner][outputs].
            // NOTE(review): an `if (false)` block that wired the bias neuron to
            // every output was dead code and has been removed, along with the
            // now-unused `biasid`/`outputsEnd` locals; the layout still reserves
            // id 0 for the bias whenever m_useBias is set.
            uint inputsStart  = m_useBias ? 1u : 0u;
            uint inputsEnd    = (uint)(inputsStart + (m_rows * m_cols));
            uint bownerStart  = inputsEnd;
            uint bownerEnd    = bownerStart + 1;
            uint outputsStart = bownerEnd;

            // CPPN input buffer: (srcX, srcY, dstX, dstY).
            float[]            coordinates = new float[4];
            float              output;
            uint               connectionCounter = 0;
            ConnectionGeneList connections       = new ConnectionGeneList();

            // Candidate connections between all input/output neuron pairs.
            for (int inpi = 0, inpcount = m_rows * m_cols; inpi < inpcount; inpi++)
            {
                for (int outi = 0, outcount = m_rows * m_cols; outi < outcount; outi++)
                {
                    // Grid positions of source (inpi) and target (outi).
                    int inrow = (inpi / m_cols); int incol = (inpi % m_cols);
                    int outrow = (outi / m_cols); int outcol = (outi % m_cols);

                    // Map both cells' centres into [-1, 1].
                    coordinates[0] = (-1.0f) + (m_colDelta * (incol + 0.5f));
                    coordinates[1] = (-1.0f) + (m_rowDelta * (inrow + 0.5f));
                    coordinates[2] = (-1.0f) + (m_colDelta * (outcol + 0.5f));
                    coordinates[3] = (-1.0f) + (m_rowDelta * (outrow + 0.5f));

                    network.ClearSignals();
                    network.SetInputSignals(coordinates);
                    network.RelaxNetwork(maxIterations, epsilon);
                    output = network.GetOutputSignal(0);

                    if (Math.Abs(output) > threshold)
                    {
                        // Rescale |output| from (threshold, 1] into
                        // (0, weightRange], preserving the sign.
                        float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                        connections.Add(new ConnectionGene(connectionCounter++,
                                                           (uint)(inputsStart + inpi), (uint)(outputsStart + outi), weight));
                    }
                }
            }

            // Wire the ball-owner flag neuron: queried at the substrate centre,
            // its weight to each output comes from the CPPN's 2nd output.
            coordinates[0] = 0.0f;
            coordinates[1] = 0.0f;

            // Candidate link from the ball-owner neuron to every output neuron.
            for (int ni = 0, ncount = m_rows * m_cols; ni < ncount; ni++)
            {
                // Grid position of the target output neuron.
                int row = (ni / m_cols);
                int col = (ni % m_cols);

                coordinates[2] = (-1.0f) + (m_colDelta * (col + 0.5f));
                coordinates[3] = (-1.0f) + (m_rowDelta * (row + 0.5f));

                network.ClearSignals();
                network.SetInputSignals(coordinates);
                network.RelaxNetwork(maxIterations, epsilon);
                output = network.GetOutputSignal(1);

                if (Math.Abs(output) > threshold)
                {
                    float weight = (float)(((Math.Abs(output) - (threshold)) / (1 - threshold)) * weightRange * Math.Sign(output));
                    connections.Add(new ConnectionGene(connectionCounter++, bownerStart, (uint)(outputsStart + ni), weight));
                }
            }


            return(new SharpNeatLib.NeatGenome.NeatGenome(0, m_neurons, connections, m_rows * m_cols, m_rows * m_cols));
        }
Пример #6
0
 /// <summary>
 /// Convenience wrapper: generates the substrate genome for the given CPPN
 /// and decodes it straight into a runnable network.
 /// </summary>
 /// <param name="CPPN">The CPPN driving substrate generation.</param>
 /// <returns>The decoded substrate network.</returns>
 public SharpNeatLib.NeuralNetwork.INetwork GenerateNetwork(SharpNeatLib.NeuralNetwork.INetwork CPPN)
 {
     SharpNeatLib.NeatGenome.NeatGenome genome = GenerateGenome(CPPN);
     return genome.Decode(null);
 }