Code Example #1
        // Multi-plane substrates (MPS) are not supported by this method.
        private NeatGenome.NeatGenome generateHomogeneousGenomeES(INetwork network, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            // Hidden-neuron positions are filled in by the substrate-evolution step below.
            List<PointF> hiddenNeuronPositions = new List<PointF>();

            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList();

            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);


            SubstrateEvolution se = new SubstrateEvolution();
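            // Sample the CPPN ('network') over the 2D substrate to discover hidden-node
            // positions and the set of connections to express.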

            se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                SubstrateEvolution.SAMPLE_WIDTH,
                SubstrateEvolution.SAMPLE_TRESHOLD,
                SubstrateEvolution.NEIGHBOR_LEVEL,
                SubstrateEvolution.INCREASE_RESSOLUTION_THRESHOLD,
                SubstrateEvolution.MIN_DISTANCE,
                SubstrateEvolution.CONNECTION_TRESHOLD, // ConnectionThreshold (0.4)
                InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref connections, ref hiddenNeuronPositions);

            HiddenCount = (uint)hiddenNeuronPositions.Count;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden.
            NeuronGeneList neurons = new NeuronGeneList((int)(InputCount + OutputCount + HiddenCount));

            // set up the input nodes
            for (uint a = 0; a < InputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }

            // set up the output nodes
            for (uint a = 0; a < OutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount, NeuronType.Output, activationFunction));
            }
            // set up the hidden nodes
            for (uint a = 0; a < HiddenCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount + OutputCount, NeuronType.Hidden, activationFunction));
            }


            // Prune "dangling" connections: remove any connection whose source node has no
            // incoming connection (and is not an input) or whose target node has no outgoing
            // connection (and is not an output). Removing connections can expose new dangling
            // ones, so repeat until none remain.
            bool danglingConnection = true;

            while (danglingConnection)
            {
                bool[] hasIncoming = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    hasIncoming[co.TargetNeuronId] = true;
                }
                // Input nodes never need an incoming connection.
                for (int i = 0; i < InputCount; i++)
                    hasIncoming[i] = true;

                bool[] hasOutgoing = new bool[neurons.Count];
                foreach (ConnectionGene co in connections)
                {
                    if (co.TargetNeuronId != co.SourceNeuronId)  // neurons that only connect to themselves don't count
                    {
                        hasOutgoing[co.SourceNeuronId] = true;
                    }
                }

                // Output nodes never need an outgoing connection.
                for (int i = 0; i < OutputCount; i++)
                    hasOutgoing[i + InputCount] = true;

                // Check whether any dangling connections remain.
                danglingConnection = false;
                foreach (ConnectionGene co in connections)
                {
                    if (!hasOutgoing[co.TargetNeuronId] || !hasIncoming[co.SourceNeuronId])
                    {
                        danglingConnection = true;
                        break;
                    }
                }

                connections.RemoveAll(delegate(ConnectionGene m) { return (!hasIncoming[m.SourceNeuronId]); });
                connections.RemoveAll(delegate(ConnectionGene m) { return (!hasOutgoing[m.TargetNeuronId]); });
            }

            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }

            SharpNeatLib.NeatGenome.NeatGenome gn = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(InputCount), (int)(OutputCount));

            gn.networkAdaptable = adaptiveNetwork;
            gn.networkModulatory = modulatoryNet;
       
            return gn;
        }
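
The while loop above does the real cleanup work: it repeatedly strips connections whose source node has no incoming link (and is not an input) or whose target node has no outgoing link (and is not an output), until no such "dangling" connections remain. Below is a minimal standalone sketch of the same fixed-point pruning; the Conn record, the Prune helper, and the node-count parameters are illustrative names for this sketch only, not part of the SharpNEAT/HyperNEAT code above.

using System;
using System.Collections.Generic;

// Illustrative stand-in for ConnectionGene; only the endpoints matter here.
record Conn(uint Source, uint Target, double Weight);

static class DanglingPrune
{
    // Remove connections whose source has no incoming link (and is not an input)
    // or whose target has no outgoing link (and is not an output), repeating until
    // no such "dangling" connection remains -- the same fixed point computed above.
    public static void Prune(List<Conn> connections, uint inputCount, uint outputCount, int nodeCount)
    {
        bool dangling = true;
        while (dangling)
        {
            var hasIncoming = new bool[nodeCount];
            var hasOutgoing = new bool[nodeCount];

            foreach (var c in connections)
            {
                hasIncoming[c.Target] = true;
                if (c.Source != c.Target)          // a self-loop alone does not count as outgoing
                    hasOutgoing[c.Source] = true;
            }
            for (uint i = 0; i < inputCount; i++)  // inputs need no incoming link
                hasIncoming[i] = true;
            for (uint i = 0; i < outputCount; i++) // outputs need no outgoing link
                hasOutgoing[i + inputCount] = true;

            dangling = connections.Exists(c => !hasIncoming[c.Source] || !hasOutgoing[c.Target]);
            connections.RemoveAll(c => !hasIncoming[c.Source]);
            connections.RemoveAll(c => !hasOutgoing[c.Target]);
        }
    }

    static void Main()
    {
        // 1 input (id 0), 1 output (id 1), 2 hidden nodes (ids 2 and 3).
        var conns = new List<Conn>
        {
            new Conn(0, 2, 0.5),   // input -> hidden 2
            new Conn(2, 1, 0.5),   // hidden 2 -> output
            new Conn(3, 1, 0.25),  // hidden 3 -> output, but nothing feeds hidden 3
        };
        Prune(conns, inputCount: 1, outputCount: 1, nodeCount: 4);
        Console.WriteLine(conns.Count); // 2: the link from the dead hidden node is gone
    }
}

As in the original, a node that only connects to itself is treated as dead, which is why self-loops are skipped when marking outgoing links.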
Code Example #2
        // Multi-plane substrates (MPS) are not supported by this method.
        private NeatGenome.NeatGenome generateMultiGenomeStackES(INetwork network, List<float> stackCoordinates, bool normalizeWeights, bool adaptiveNetwork, bool modulatoryNet)
        {
            if (useMultiPlaneSubstrate) throw new Exception("MPS not implemented for these parameters");
            uint numberOfAgents = (uint)stackCoordinates.Count;
            IActivationFunction activationFunction = HyperNEATParameters.substrateActivationFunction;
            ConnectionGeneList connections = new ConnectionGeneList((int)(numberOfAgents * (InputCount * HiddenCount) + numberOfAgents * (HiddenCount * OutputCount)));
            uint connectionCounter = 0;

            uint totalOutputCount = OutputCount * numberOfAgents;
            uint totalInputCount = InputCount * numberOfAgents;

            // SharpNEAT requires that the neuron list be in this order: bias|input|output|hidden.
            NeuronGeneList neurons = new NeuronGeneList((int)(InputCount * numberOfAgents + OutputCount * numberOfAgents + HiddenCount * numberOfAgents));

            // set up the input nodes
            for (uint a = 0; a < totalInputCount; a++)
            {
                neurons.Add(new NeuronGene(a, NeuronType.Input, ActivationFunctionFactory.GetActivationFunction("NullFn")));
            }
            // set up the output nodes
            for (uint a = 0; a < totalOutputCount; a++)
            {
                neurons.Add(new NeuronGene(a + InputCount * numberOfAgents, NeuronType.Output, activationFunction));
            }


            uint agent = 0;

            List<PointF> outputNeuronPositions = getNeuronGroupByType(1);
            List<PointF> inputNeuronPositions = getNeuronGroupByType(0);

            uint hiddenCount = 0;

            foreach (float stackCoordinate in stackCoordinates)
            {
                List<PointF> hiddenNeuronPositions = new List<PointF>();
                ConnectionGeneList con = new ConnectionGeneList();
                SubstrateEvolution se = new SubstrateEvolution();
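                // Sample the CPPN at this agent's stack coordinate to discover the agent's
                // own hidden-node positions and expressed connections.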
                se.generateConnections(inputNeuronPositions, outputNeuronPositions, network,
                    SubstrateEvolution.SAMPLE_WIDTH,
                    SubstrateEvolution.SAMPLE_TRESHOLD,
                    SubstrateEvolution.NEIGHBOR_LEVEL,
                    SubstrateEvolution.INCREASE_RESSOLUTION_THRESHOLD,
                    SubstrateEvolution.MIN_DISTANCE,
                    SubstrateEvolution.CONNECTION_TRESHOLD, // ConnectionThreshold (0.4)
                    InputCount, OutputCount, -1.0f, -1.0f, 1.0f, 1.0f, ref con, ref hiddenNeuronPositions, stackCoordinate);

                // set up the hidden nodes
                for (uint a = 0; a < hiddenNeuronPositions.Count; a++)
                {
                    neurons.Add(new NeuronGene(hiddenCount + a + totalInputCount + totalOutputCount, NeuronType.Hidden, activationFunction));
                }



                // Remap this agent's local neuron IDs (inputs | outputs | hidden within one
                // substrate copy) into the stacked genome's global ID space
                // (all inputs | all outputs | all hidden across agents).
                foreach (ConnectionGene c in con)
                {
                    if (c.SourceNeuronId < InputCount)
                    {
                        c.SourceNeuronId += agent * InputCount;
                    }
                    else if (c.SourceNeuronId < InputCount + OutputCount)
                    {
                        c.SourceNeuronId = (c.SourceNeuronId - InputCount) + totalInputCount + agent * OutputCount;
                    }
                    else
                    {
                        c.SourceNeuronId = (uint)((c.SourceNeuronId - InputCount - OutputCount) + totalInputCount + totalOutputCount + hiddenCount);
                    }

                    if (c.TargetNeuronId < InputCount)
                    {
                        c.TargetNeuronId += agent * InputCount;
                    }
                    else if (c.TargetNeuronId < InputCount + OutputCount)
                    {
                        c.TargetNeuronId = (c.TargetNeuronId - InputCount) + totalInputCount + agent * OutputCount;
                    }
                    else
                    {
                        c.TargetNeuronId = (uint)((c.TargetNeuronId - InputCount - OutputCount) + totalInputCount + totalOutputCount + hiddenCount);
                    }

                    connections.Add(new ConnectionGene(connectionCounter++, c.SourceNeuronId, c.TargetNeuronId, c.Weight, ref c.coordinates));

                }
                hiddenCount += (uint)hiddenNeuronPositions.Count;
                agent++;

            }
            if (normalizeWeights)
            {
                normalizeWeightConnections(ref connections, neurons.Count);
            }
            SharpNeatLib.NeatGenome.NeatGenome sng = new SharpNeatLib.NeatGenome.NeatGenome(0, neurons, connections, (int)(totalInputCount), (int)(totalOutputCount));
            sng.networkAdaptable = adaptiveNetwork;
            sng.networkModulatory = modulatoryNet;
            return sng;
        }
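
Most of the subtlety in this method is the ID remapping inside the foreach loop: each agent's substrate is generated with local IDs laid out as inputs | outputs | hidden, while the stacked genome uses global IDs grouped as all inputs, then all outputs, then all hidden nodes. The sketch below factors that arithmetic into a single function so it can be checked in isolation; StackIdMapping, MapToGlobalId, and the example numbers are hypothetical and simply mirror the expressions used above.

using System;

static class StackIdMapping
{
    // Map a neuron id that is local to one agent's substrate copy
    // (layout: inputs | outputs | hidden) to its id in the stacked genome
    // (layout: all inputs | all outputs | all hidden).
    public static uint MapToGlobalId(uint localId, uint agent, uint inputCount, uint outputCount,
                                     uint totalInputCount, uint totalOutputCount, uint hiddenOffset)
    {
        if (localId < inputCount)                // input node of this agent
            return localId + agent * inputCount;
        if (localId < inputCount + outputCount)  // output node of this agent
            return (localId - inputCount) + totalInputCount + agent * outputCount;
        // hidden node: hiddenOffset counts the hidden nodes added by earlier agents
        return (localId - inputCount - outputCount) + totalInputCount + totalOutputCount + hiddenOffset;
    }

    static void Main()
    {
        // Example: 2 agents, 3 inputs and 1 output per agent; agent 0 contributed 4 hidden nodes.
        uint inputCount = 3, outputCount = 1, agents = 2;
        uint totalIn = inputCount * agents, totalOut = outputCount * agents;

        Console.WriteLine(MapToGlobalId(0, 1, inputCount, outputCount, totalIn, totalOut, 4)); // 3
        Console.WriteLine(MapToGlobalId(3, 1, inputCount, outputCount, totalIn, totalOut, 4)); // 7
        Console.WriteLine(MapToGlobalId(4, 1, inputCount, outputCount, totalIn, totalOut, 4)); // 12
    }
}

With these numbers, agent 1's inputs occupy global IDs 3-5, its output gets ID 7, and its first hidden node lands at ID 12 (after the two agents' inputs and outputs and agent 0's four hidden nodes).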