Example #1
    public void ResetAgent(NeuralGenome newNeuralGenome = null)
    {
        if (newNeuralGenome != null)
        {
            neuralGenome = newNeuralGenome;
            //feedNetworkTask = new Task(() => neuralGenome.FeedNeuralNetwork(GenerateNetworkInputs()));
        }

        gameObject.SetActive(true);

        cartRb.transform.localPosition = Vector2.zero;

        StopRb(cartRb);
        StopRb(poleRb);

        var startingRotation = new Vector3(
            0, 0, PopulationProxy.Instance.startingAngle);

        poleRb.transform.SetPositionAndRotation(PopulationProxy.Instance.startingPos,
                                                Quaternion.Euler(startingRotation));

        neuralGenome.Fitness = 0;
        startOfGoodSolution  = -1;

        neuralGenome.NetworkOperationBaker.BakeNetwork(neuralGenome);
    }
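For context, a population controller (here PopulationProxy) would typically call ResetAgent on every agent when a new generation starts. A minimal usage sketch; agents and genomes are assumed collections kept by that controller and are not part of the example above:

    // Usage sketch: reset every agent with its freshly evolved genome at the start of a generation.
    // 'agents' and 'genomes' are hypothetical fields of the population controller.
    for (int i = 0; i < agents.Length; i++)
    {
        agents[i].ResetAgent(genomes[i]);
    }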
Example #2
        public NeuralGenomeCmpInfo(NeuralGenome target1, NeuralGenome target2)
        {
            var excessPoint = Math.Min(
                target1.NeuralGenes.Max(ng => ng.Synapse.InnovationNb),
                target2.NeuralGenes.Max(ng => ng.Synapse.InnovationNb)
                );

            var groups = target1.NeuralGenes
                         .Concat(target2.NeuralGenes)
                         .GroupBy(ng => ng.Synapse.InnovationNb);

            Matching = groups.Where(x => x.Count() == 2)
                       .Select(x =>
                               new Tuple <NeuralGene, NeuralGene>(
                                   x.First(),
                                   x.Last())
                               );

            Disjoint = groups.Where(x =>
                                    x.Count() == 1 &&
                                    x.First().Synapse.InnovationNb <= excessPoint)
                       .SelectMany(x => x);

            Excess = groups.Where(x =>
                                  x.Count() == 1 &&
                                  x.First().Synapse.InnovationNb > excessPoint)
                     .SelectMany(x => x);

            // Sanity check: every matching pair must combine a gene from target1 with one from target2.
            Debug.Assert(Matching.All(x =>
                                      target1.NeuralGenes.Contains(x.Item1) &&
                                      target2.NeuralGenes.Contains(x.Item2)));
        }
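The matching, disjoint and excess sets computed above are the ingredients of NEAT's compatibility distance, which is what such a comparison object is normally used for. A minimal sketch of that formula, assuming the properties built in this constructor; the coefficients c1, c2, c3, the normalizer and the helper itself are illustrative and not taken from this codebase:

        // Hypothetical helper: NEAT compatibility distance built from the comparison info above.
        private static float CompatibilityDistance(
            NeuralGenomeCmpInfo info, int largerGenomeSize,
            float c1 = 1f, float c2 = 1f, float c3 = 0.4f)
        {
            var n = Math.Max(largerGenomeSize, 1);
            var avgWeightDiff = info.Matching.Any()
                ? info.Matching.Average(p => Math.Abs(p.Item1.Synapse.Weight - p.Item2.Synapse.Weight))
                : 0f;

            return c1 * info.Excess.Count() / n +
                   c2 * info.Disjoint.Count() / n +
                   c3 * avgWeightDiff;
        }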
Example #3
        private float ComputeFitness(NeuralGenome genome)
        {
            genome.NetworkOperationBaker.BakeNetwork(genome);
            var fitness = 0d;

            for (var i = 0; i < 2; i++)
            {
                for (var j = 0; j < 2; j++)
                {
                    genome.FeedNeuralNetwork(new float[] { i, j });
                    var output = genome.Outputs.Select(x => x.Value).First();

                    var targetValue = i ^ j;
                    var delta       = Math.Abs(targetValue - output);
                    var gradient    = (i == j && i == 1) ? 5 : 1;
                    fitness -= delta * gradient;
                }
            }

            if (fitness >= -0.01f)
            {
                targetReached = true;
            }

            return((float)fitness);
        }
        public static string ToJson(
            this NeuralGenome target,
            float neuronRadius   = 0.03f,
            float maxWeight      = 1,
            float edgeWidth      = 3,
            bool printNeuronText = true)
        {
            var neurons = new List <JsonNeuron>();

            neurons.AddRange(GetInputNeurons(target));
            neurons.AddRange(GetOutputNeurons(target));
            neurons.AddRange(GetRemainingNeurons(target));

            var edges = GetJsonEdges(target);

            ProcessNetworkGroups(target, neurons, edges);

            var jsonObj = new
            {
                neuron_radius     = neuronRadius,
                max_weight        = maxWeight,
                edge_width        = edgeWidth,
                print_neurons_txt = printNeuronText,

                neurons,
                edges
            };

            var result = JsonConvert.SerializeObject(jsonObj);

            return(result);
        }
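A usage sketch for the serializer above; genome stands for any NeuralGenome instance, the file path is illustrative, and the JSON is presumably consumed by an external graph visualizer:

        // Usage sketch: serialize a genome and write it to disk for an external visualizer.
        // 'genome' and the output path are placeholders.
        var json = genome.ToJson(neuronRadius: 0.05f, printNeuronText: false);
        System.IO.File.WriteAllText("genome.json", json);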
        private static JsonNeuron[] GetOutputNeurons(NeuralGenome target)
        {
            float x, y;
            float deltaY = 0;

            x = 1f - xPadding;
            if (target.Outputs.Count() == 1)
            {
                y = 0.5f;
            }
            else
            {
                y      = yPadding;
                deltaY = (1f - 2 * yPadding) / target.Outputs.Count();
            }
            var outputNeurons = target.Outputs.Select(n =>
            {
                var pos    = GetNeuronPos(n, x, y);
                var result = new JsonNeuron
                {
                    x     = pos.X,
                    y     = pos.Y,
                    innov = n.InnovationNb,
                    color = new[] { 1f, 0.917f, 0.721f },
                    label = "O"
                };

                y += deltaY;
                return(result);
            }).ToArray();

            return(outputNeurons);
        }
    void AddNodesAndConnectionToChild(NeuralGeneConnection parentConnection, NeuralGenome childGenome)
    {
        // Copy the connection's endpoint nodes into the child (reusing them if already present),
        // then copy the connection itself, keeping the parent's innovation number.
        NeuralGeneNode tmpInputNode  = null;
        NeuralGeneNode tmpOutputNode = null;

        if (childGenome.HasNode(parentConnection.inputNeuron.nodeNumber))
        {
            tmpInputNode = childGenome.GetNode(parentConnection.inputNeuron.nodeNumber);
        }
        else
        {
            tmpInputNode            = AddNodeToGenome(parentConnection.inputNeuron.nodeType, childGenome);
            tmpInputNode.bias       = parentConnection.inputNeuron.bias;
            tmpInputNode.nodeNumber = parentConnection.inputNeuron.nodeNumber;
        }
        if (childGenome.HasNode(parentConnection.outputNeuron.nodeNumber))
        {
            tmpOutputNode = childGenome.GetNode(parentConnection.outputNeuron.nodeNumber);
        }
        else
        {
            tmpOutputNode            = AddNodeToGenome(parentConnection.outputNeuron.nodeType, childGenome);
            tmpOutputNode.bias       = parentConnection.outputNeuron.bias;
            tmpOutputNode.nodeNumber = parentConnection.outputNeuron.nodeNumber;
        }
        if (!childGenome.HasConnection(parentConnection.inputNeuron.nodeNumber, parentConnection.outputNeuron.nodeNumber))
        {
            childGenome.AddConnection(tmpInputNode, tmpOutputNode, parentConnection.connectionIsEnabled, parentConnection.innovation);
        }
    }
 private float FeedRNNPair(NeuralGenome genome, int[] values)
 {
     genome.ResetNeuronsValues();
     genome.FeedNeuralNetwork(new float[] { values[0] });
     genome.FeedNeuralNetwork(new float[] { values[1] });
     return(genome.Outputs.First().Value);
 }
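FeedRNNPair clears the recurrent state and feeds the two values on consecutive time steps, so the caller scores the final output against the XOR of the pair, roughly as in the commented-out block further below. The concrete pair and the genome variable are illustrative:

      // Usage sketch: score one sample pair against its XOR target.
      var pair  = new[] { 1, 0 };
      var delta = Math.Abs((pair[0] ^ pair[1]) - FeedRNNPair(genome, pair));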
        protected IEnumerable <BakedOperation> BakeNetworkInternal(NeuralGenome genome)
        {
            var computedNeurons = new HashSet <InnovationNumber>();

            foreach (var neuron in genome.Neurons.Values)
            {
                if (typeof(MemoryNeuron).IsAssignableFrom(neuron.GetType()))
                {
                    yield return(() =>
                    {
                        genome.Neurons[neuron.InnovationNb].Value =
                            genome.Neurons[(neuron as MemoryNeuron).TargetNeuron]
                            .Value;
                    });
                }
            }

            foreach (var outNeuron in genome.Outputs)
            {
                foreach (var op in RecursiveOp(genome, outNeuron, computedNeurons))
                {
                    yield return(op);
                }
            }
        }
        private IEnumerable <BakedOperation> RecursiveOp(
            NeuralGenome genome,
            Neuron target,
            HashSet <InnovationNumber> solvedNeurons)
        {
            if (target.IsStarting)
            {
                solvedNeurons.Add(target.InnovationNb);
                yield break;
            }

            Trace.Assert(!solvedNeurons.Contains(target.InnovationNb));

            solvedNeurons.Add(target.InnovationNb);
            yield return(() => target.Value = target.ValueCollector.InitialValue);

            foreach (var gene in genome.GetGenesToNeuron(target.InnovationNb))
            {
                var synapse = gene.Synapse;
                if (!synapse.enabled)
                {
                    continue;
                }

                if (!solvedNeurons.Contains(synapse.incoming))
                {
                    var neuronToSolve = genome.Neurons[synapse.incoming];
                    foreach (var op in RecursiveOp(genome, neuronToSolve, solvedNeurons))
                    {
                        yield return(op);
                    }
                }

                yield return(() =>
                {
                    var incomingNeurVal = genome.Neurons[synapse.incoming].Value;
                    var newDelta        = synapse.Weight * incomingNeurVal;
                    var newVal = target.ValueCollector.Collect(
                        target.Value,
                        newDelta);

                    target.Value = newVal;
                });
            }

            if (target.ValueModifiers != null)
            {
                foreach (var valueModifier in target.ValueModifiers)
                {
                    yield return(() =>
                                 target.Value = (float)valueModifier(target.Value));
                }
            }

            if (target.Activation != null)
            {
                yield return(() =>
                             target.Value = (float)target.Activation(target.Value));
            }
        }
        protected override void DoMutation(NeuralGenome genome)
        {
            var delta = DeltaWeight();

            genome.NeuralGenes
            .Where(x => x.ExposedToMutations)
            .RandomChoice().Synapse.Weight +=
                (float)GARandomManager.Random.NextDouble(-delta, delta);
        }
Example #11
    public virtual void ResetAgent(
        Vector3 pos,
        NeuralGenome newNeuralGenome = null)
    {
        this.transform.position = pos;

        if (newNeuralGenome != null)
        {
            this.neuralGenome = newNeuralGenome;
        }

        gameObject.SetActive(true);
        this.neuralGenome.Fitness = 0;
    }
    public virtual void ResetAgent(NeuralGenome newNeuralGenome = null)
    {
        if (newNeuralGenome != null)
        {
            newNeuralGenome.NetworkOperationBaker.BakeNetwork(newNeuralGenome);
            this.neuralGenome = newNeuralGenome;
        }

        gameObject.SetActive(true);
        this.neuralGenome.Fitness = 0;
    }
        private static JsonNeuron[] GetRemainingNeurons(NeuralGenome target)
        {
            var remainingNodes = target.Neurons.Values
                                 .Where(n =>
                                        !target.Inputs.Contains(n) &&
                                        !target.Outputs.Contains(n) &&
                                        !target.Biasses.Contains(n) &&
                                        n.group == null)
                                 .ToArray();

            //  Compute the positions
            foreach (var node in remainingNodes)
            {
                Vector2 pos;

                if (!NeuronPos.ContainsKey(node.InnovationNb))
                {
                    pos = GetRandomPos(randomPosTries);
                    NeuronPos.Add(node.InnovationNb, pos);
                }
            }

            var remainingNeurons = remainingNodes.Select(n =>
            {
                JsonNeuron result;

                if (typeof(MemoryNeuron).IsAssignableFrom(n.GetType()))
                {
                    result = new JsonNeuron
                    {
                        x     = NeuronPos[n.InnovationNb].X,
                        y     = NeuronPos[n.InnovationNb].Y,
                        innov = n.InnovationNb,
                        color = new[] { 0.949f, 1, 0 },
                        label = string.Format("{0}<", (n as MemoryNeuron).TargetNeuron)
                    };
                }
                else
                {
                    result = new JsonNeuron
                    {
                        x     = NeuronPos[n.InnovationNb].X,
                        y     = NeuronPos[n.InnovationNb].Y,
                        innov = n.InnovationNb,
                    };
                }

                return(result);
            }).ToArray();

            return(remainingNeurons);
        }
 public NeatNeuralNetwork(NeuralActivationFunction[] _neuralActivationFunctions, int _inputSize, int _outputSize, double _learnRate = -1, double _momentum = -1)
 {
     genome = new NeuralGenome();
     neuralActivationFunctions = _neuralActivationFunctions;
     genome.learnRate          = _learnRate == -1 ? .1 : _learnRate;
     genome.momentum           = _momentum == -1 ? .4 : _momentum;
     genome.InputLayer         = new List <NeuralGeneNode>();
     genome.HiddenLayers       = new List <NeuralGeneNode>();
     genome.OutputLayer        = new List <NeuralGeneNode>();
     genome.nodes       = new List <NeuralGeneNode>();
     genome.connections = new List <NeuralGeneConnection>();
     genome.InitNewGenome(_inputSize, _outputSize, 0, _neuralActivationFunctions);
 }
 private static List <JsonEdge> GetJsonEdges(NeuralGenome target)
 {
     return(target.NeuralGenes
            .Select(x => x.Synapse)
            .Where(x => target.Neurons[x.incoming].group == null)
            .Where(x => target.Neurons[x.outgoing].group == null)
            .Select(x => new JsonEdge
     {
         start = x.incoming,
         end = x.outgoing,
         w = x.Weight
     }).ToList());
 }
Example #16
    public override void ResetAgent(Vector3 pos, NeuralGenome newNeuralGenome = null)
    {
        base.ResetAgent(pos, newNeuralGenome);

        for (int i = 0; i < rigidbodies.Length; i++)
        {
            rigidbodies[i].transform.SetPositionAndRotation(
                rigidbodiesInitialPos[i],
                rigidbodiesInitialRot[i]);
            rigidbodies[i].angularVelocity = 0;
            rigidbodies[i].velocity        = Vector3.zero;
            rigidbodies[i].Sleep();
        }
    }
    public override void ResetAgent(Vector3 pos, NeuralGenome newNeuralGenome = null)
    {
        base.ResetAgent(pos, newNeuralGenome);
        transform.rotation = Quaternion.Euler(0, 0, 90);

        for (int i = 0; i < rigidbodies.Length; i++)
        {
            rigidbodies[i].transform.localPosition = initialAgentPartStates[i].localPos;
            rigidbodies[i].transform.localRotation = initialAgentPartStates[i].localRot;
            rigidbodies[i].angularVelocity         = 0;
            rigidbodies[i].velocity = Vector2.zero;
            rigidbodies[i].Sleep();
        }
    }
        protected override void DoMutation(NeuralGenome genome)
        {
            var rnd         = GARandomManager.Random;
            var deltaWeight = DeltaWeight();

            var targets = genome.NeuralGenes
                          .Where(ng =>
                                 ng.ExposedToMutations &&
                                 rnd.NextDouble() <= SynapseMutationChance);

            foreach (var ng in targets)
            {
                ng.Synapse.Weight += (float)rnd.NextDouble(-deltaWeight, deltaWeight);
            }
        }
        private float ComputeFitness(NeuralGenome genome)
        {
            var fitness = 0d;

            for (var i = 0; i < Math.Pow(2, inputs) - 1; i++)
            {
                genome.NetworkOperationBaker.BakeNetwork(genome);
                genome.FeedNeuralNetwork(GetBits(i).Select(x => (float)x).ToArray());
                var expectedOutput = GetBits(i + 1);
                fitness -= genome.Outputs.Select(x => x.Value)
                           .Zip(expectedOutput, (o, e) => Math.Abs(o - e))
                           .Sum();
            }

            return((float)fitness);
        }
 public NeatNeuralNetwork(NeuralActivationFunction[] _neuralActivationFunctions, int _inputSize, int _outputSize, int _globalInnovation, bool _firstGenome,
                          Func <int, float> fitnessFunction, float[] _mutationRates, Func <int, int> _increaseGlobalInnovation, double _learnRate = -1, double _momentum = -1)
 {
     genome = new NeuralGenome();
     neuralActivationFunctions = _neuralActivationFunctions;
     genome.learnRate          = _learnRate == -1 ? .1 : _learnRate;
     genome.momentum           = _momentum == -1 ? .4 : _momentum;
     genome.random             = Random;
     genome.InputLayer         = new List <NeuralGeneNode>();
     genome.HiddenLayers       = new List <NeuralGeneNode>();
     genome.OutputLayer        = new List <NeuralGeneNode>();
     genome.nodes           = new List <NeuralGeneNode>();
     genome.connections     = new List <NeuralGeneConnection>();
     genome.mutationRates   = _mutationRates;
     genome.fitnessFunction = fitnessFunction;
     genome.InitNewGenome(_inputSize, _outputSize, _globalInnovation, _neuralActivationFunctions, _increaseGlobalInnovation, _firstGenome);
 }
    public void CalculateFitness()
    {
        fitnessSum = 0;
        NeatNeuralNetwork best = Population[0];

        for (int i = 0; i < Population.Count; i++)
        {
            fitnessSum += Population[i].genome.CalculateFitness(i);

            if (Population[i].genome.fitness > best.genome.fitness)
            {
                best = Population[i];
            }
        }

        BestFitness = (float)best.genome.fitness;
        BestGenes   = best.genome;
    }
    public NeatGeneticAlgorithm(int populationSize, Random random, int elitist, int mercy, NeuralActivationFunction[] _neuralActivationFunctions,
                                int _inputSize, int _outputSize, Func <int, float> _fitnessFunction, double _learnRate = -1, double _momentum = -1, float mutationRate = 0.01f,
                                float[] _mutationRates = null, Func <NeatNeuralNetwork, float, bool> mutateGene = null)
    {
        globalInnovation          = 1;
        Generation                = 1;
        MutationRate              = mutationRate;
        this.random               = random;
        this.elitist              = elitist;
        this.mercy                = mercy;
        learnRate                 = _learnRate;
        momentum                  = _momentum;
        inputSize                 = _inputSize;
        outputSize                = _outputSize;
        neuralActivationFunctions = _neuralActivationFunctions;
        if (_mutationRates != null)
        {
            mutationRates = _mutationRates;
        }
        else
        {
            // No rates supplied: fill the pre-allocated mutationRates field with evenly spaced defaults.
            for (int i = 0; i < mutationRates.Length; i++)
            {
                mutationRates[i] = (float)(i + 1) / (float)(mutationRates.Length + 1);
            }
        }

        fitnessFunction = _fitnessFunction;
        Population      = new List <NeatNeuralNetwork>(populationSize);
        newPopulation   = new List <NeatNeuralNetwork>(populationSize);
        MatingPool      = new List <NeatNeuralNetwork>();
        BestGenes       = new NeuralGenome();

        Population.Add(new NeatNeuralNetwork(neuralActivationFunctions, inputSize, outputSize, globalInnovation, true, fitnessFunction, mutationRates, IncreaseGlobalInnovation, learnRate, momentum));
        for (int i = 0; i < populationSize - 1; i++)
        {
            Population.Add(new NeatNeuralNetwork(neuralActivationFunctions, inputSize, outputSize, globalInnovation, false, fitnessFunction, mutationRates, IncreaseGlobalInnovation, learnRate, momentum));
        }

        for (int i = 0; i < populationSize; i++)
        {
            Population[i].genome.Mutate(mutationRate);
        }
    }
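A hedged construction sketch for the algorithm above; activationFunctions and EvaluateAgent are placeholders for whatever the surrounding project supplies, and the numeric settings are arbitrary:

    // Usage sketch: wire up the GA. 'activationFunctions' and 'EvaluateAgent' are hypothetical.
    var ga = new NeatGeneticAlgorithm(
        populationSize: 50,
        random: new Random(),
        elitist: 2,
        mercy: 5,
        _neuralActivationFunctions: activationFunctions,
        _inputSize: 2,
        _outputSize: 1,
        _fitnessFunction: i => EvaluateAgent(i));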
        private static JsonNeuron[] GetInputNeurons(NeuralGenome target)
        {
            float x, y;
            float deltaY = 0;

            // Inputs
            x = xPadding;
            y = yPadding;
            var inputs = target.Inputs.Concat(target.Biasses);

            deltaY = (1f - 2 * yPadding) / inputs.Count();
            var inputNeurons = inputs.Select(n =>
            {
                JsonNeuron result;

                var pos = GetNeuronPos(n, x, y);
                if (target.Inputs.Contains(n))
                {
                    result = new JsonNeuron
                    {
                        x     = pos.X,
                        y     = pos.Y,
                        innov = n.InnovationNb,
                        color = new[] { 1f, 0.721f, 0.992f },
                        label = "I"
                    };
                }
                else
                {
                    result = new JsonNeuron
                    {
                        x     = pos.X,
                        y     = pos.Y,
                        innov = n.InnovationNb,
                        color = new[] { 0.627f, 0.160f, 1f },
                        label = "B"
                    };
                }
                y += deltaY;
                return(result);
            }).ToArray();

            return(inputNeurons);
        }
Example #24
    public override void ResetAgent(NeuralGenome newNeuralGenome = null)
    {
        base.ResetAgent(newNeuralGenome);

        ball.transform.localPosition = initialLocalBallPos;
        ball.rotation        = Quaternion.Euler(Vector3.zero);
        ball.velocity        = Vector3.zero;
        ball.angularVelocity = Vector3.zero;
        ball.Sleep();

        platform.localPosition     = Vector3.zero;
        platform.rotation          = Quaternion.Euler(Vector3.zero);
        platformRb.velocity        = Vector3.zero;
        platformRb.angularVelocity = Vector3.zero;
        platformRb.Sleep();

        platform.gameObject.SetActive(true);
        ball.gameObject.SetActive(true);

        startTime = Time.time;
    }
        private float ComputeFitness(NeuralGenome genome)
        {
            genome.NetworkOperationBaker.BakeNetwork(genome);
            var fitness = 0d;

            // Continuous input.
            foreach (var dataset in datasets)
            {
                genome.ResetNeuronsValues();

                int expectedResult = 0;
                foreach (var input in dataset)
                {
                    expectedResult ^= input;
                    genome.FeedNeuralNetwork(new float[] { input });

                    var delta = Math.Abs(expectedResult - genome.Outputs[0].Value);
                    fitness -= delta;
                }
            }

            // Pair input
            //foreach (var pair in samplePairs)
            //{
            //	var output = FeedRNNPair(genome, pair);
            //	var expectedResult = pair[0] ^ pair[1];

            //	var delta = Math.Abs(expectedResult - output);
            //             fitness -= delta;
            //}

            if (fitness >= -1f)
            {
                targetReached = true;
            }

            return((float)fitness);
        }
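The continuous variant above streams each sequence one bit at a time and expects the running XOR after every step, so datasets is presumably a collection of short 0/1 sequences along these lines (the element type and values are illustrative, not taken from this codebase):

        // Illustrative shape of the 'datasets' field consumed above: a few binary sequences.
        private static readonly int[][] datasets =
        {
            new[] { 0, 1, 1, 0, 1 },
            new[] { 1, 1, 0, 0, 1, 0 }
        };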
    NeuralGeneNode AddNodeToGenome(NeuralNodeType type, NeuralGenome genome)
    {
        // Each node type gets its own activation function slot (0: input, 1: hidden, 2: output).
        switch (type)
        {
        case NeuralNodeType.Input:
            return(genome.AddInputNode(neuralActivationFunctions[0]));

        case NeuralNodeType.Output:
            return(genome.AddOutputNode(neuralActivationFunctions[2]));

        case NeuralNodeType.Hidden:
            return(genome.AddHiddenNode(neuralActivationFunctions[1]));

        default:
            return(null);
        }
    }
 void CrossOverParents(NeuralGeneConnection thisParentConnection, NeuralGeneConnection otherParentConnection, NeuralGenome childGenome, int bestGenome)
 {
     if (thisParentConnection.innovation == otherParentConnection.innovation || bestGenome == 0)
     {
         // Matching innovation, or both parents equally fit: pick either parent with 50/50 probability.
         if (Random.NextDouble() < 0.5)
         {
             AddNodesAndConnectionToChild(otherParentConnection, childGenome);
         }
         else
         {
             AddNodesAndConnectionToChild(thisParentConnection, childGenome);
         }
     }
     else if (bestGenome == 1)
     {
         // The other parent is fitter.
         AddNodesAndConnectionToChild(otherParentConnection, childGenome);
     }
     else
     {
         // This parent is fitter.
         AddNodesAndConnectionToChild(thisParentConnection, childGenome);
     }
 }
 protected abstract void DoMutation(NeuralGenome genome);
 void HandleExtraNodes(NeuralGeneConnection thisParentConnection, NeuralGeneConnection otherParentConnection, NeuralGenome childGenome, int bestGenome, bool takeNotOptimum)
 {
     if (bestGenome == 0)
     {
         //other parent
         if (otherParentConnection != null)
         {
             AddNodesAndConnectionToChild(otherParentConnection, childGenome);
         }
         else if (thisParentConnection != null) // this parent
         {
             AddNodesAndConnectionToChild(thisParentConnection, childGenome);
         }
     }
     else if (bestGenome == 1 && otherParentConnection != null)
     {
         // choose from other parent
         AddNodesAndConnectionToChild(otherParentConnection, childGenome);
     }
     else if (bestGenome == 2 && thisParentConnection != null)
     {
         // choose from this parent
         AddNodesAndConnectionToChild(thisParentConnection, childGenome);
     }
     else if (takeNotOptimum && thisParentConnection != null)
     {
         AddNodesAndConnectionToChild(thisParentConnection, childGenome);
     }
     else if (takeNotOptimum && otherParentConnection != null)
     {
         AddNodesAndConnectionToChild(otherParentConnection, childGenome);
     }
 }
 public void BakeNetwork(NeuralGenome genome)
 {
     bakedOperations = BakeNetworkInternal(genome).ToArray();
     IsBaked         = true;
 }
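BakeNetwork materializes the operation list once so that later feed-forward passes can skip the recursive traversal; presumably the feed step then just replays the delegates in order, roughly like this (the method name and its placement are assumptions; only bakedOperations comes from the code above):

  // Usage sketch: how the baked operations would be replayed on each feed-forward pass.
  void RunBakedOperations()
  {
      foreach (var op in bakedOperations)
      {
          op();
      }
  }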