/// <summary>
/// Builds a minimal model for the 4-input cart-pole agent: input neurons
/// wired directly to a single TanH output, plus a recurrent self-connection
/// on the output neuron.
/// </summary>
private INeuralModel Init4InputsNeuralModel()
{
    var model = new NeuralModelBase();
    // Fixed: removed a stray empty statement (";;") after this assignment.
    model.defaultWeightInitializer = () => GARandomManager.NextFloat(-1, 1);
    model.WeightConstraints = new Tuple<float, float>(-50f, 50f);

    var layers = new List<Neuron[]>()
    {
        model.AddInputNeurons(CartPoleAgent.nbOfInputs).ToArray(),
        model.AddOutputNeurons(1, ActivationFunctions.TanH).ToArray(),
    };
    model.ConnectLayers(layers);

    // Recurrent self-loop: the output neuron feeds its previous value
    // back into itself.
    var outputNeuron = layers.Last().Last();
    model.AddConnection(outputNeuron.InnovationNb, outputNeuron.InnovationNb);

    return model;
}
/// <summary>
/// Builds a 3-layer model (inputs -> Gaussian hidden layer of equal width
/// -> Sigmoid outputs) with a bias neuron connected to every non-input layer.
/// </summary>
private INeuralModel InitNeuralModel()
{
    var model = new NeuralModelBase
    {
        defaultWeightInitializer = () => GARandomManager.NextFloat(-1f, 1f),
        WeightConstraints = new Tuple<float, float>(-10, 10),
    };

    var biasNeuron = model.AddBiasNeuron();

    var inputLayer = model.AddInputNeurons(inputs).ToArray();
    var hiddenLayer = model.AddNeurons(
        new Neuron(-1, ActivationFunctions.Gaussian),
        count: inputs
    ).ToArray();
    var outputLayer = model.AddOutputNeurons(
        inputs,
        ActivationFunctions.Sigmoid
    ).ToArray();

    var layers = new List<Neuron[]>() { inputLayer, hiddenLayer, outputLayer };
    model.ConnectLayers(layers);
    model.ConnectBias(biasNeuron, layers.Skip(1));

    return model;
}
/// <summary>
/// Per-physics-tick update: deactivates the agent when it leaves the rail
/// limits or the pole drops below the allowed angle; otherwise lets the
/// network drive the cart and accumulates fitness for this tick.
/// </summary>
private void FixedUpdate()
{
    // Fixed: the original fell through after deactivation, so DeactivateAgent
    // could run twice and the AI logic still executed for a removed agent.
    if (!RailwayDelimiteter.IsInsideLimits(cartRb.transform))
    {
        PopulationProxy.Instance.DeactivateAgent(this);
        return;
    }
    if (GetPoleRotPart() < PopulationProxy.Instance.removeAgentIfBelowAnglePart)
    {
        PopulationProxy.Instance.DeactivateAgent(this);
        return;
    }

    if (isAI && neuralGenome != null)
    {
        // Periodically shove the cart with a random horizontal force so the
        // network has to actively stabilize the pole.
        if (Time.time > lastRandomTorqueApplication + PopulationProxy.Instance.forceInterval)
        {
            lastRandomTorqueApplication = Time.time;
            var force = PopulationProxy.Instance.force
                        * PopulationProxy.Instance.forceMultiplier;
            poleRb.AddForce(Vector2.right * GARandomManager.NextFloat(-force, force));
        }

        MoveFromNetwork();
        neuralGenome.Fitness += ComputeFitnessForThisTick();
    }
}
/// <summary>
/// Builds the default agent model: inputs -> TanH hidden layer (4x the
/// input width) -> Sigmoid outputs, with the output layer also connected
/// recurrently to itself.
/// </summary>
protected override INeuralModel InitNeuralModel()
{
    var model = new NeuralModelBase();
    model.defaultWeightInitializer = () => GARandomManager.NextFloat(-1, 1);
    model.WeightConstraints = weightConstraints.ToTuple();

    // GetComponent is comparatively expensive; resolve the proxy once
    // instead of three times.
    var agentProxy = agentPrefab.GetComponent<AgentProxy>();

    var layers = new List<Neuron[]>()
    {
        // Inputs
        model.AddInputNeurons(agentProxy.nbOfInputs).ToArray(),
        model.AddNeurons(
            new Neuron(-1, ActivationFunctions.TanH),
            count: agentProxy.nbOfInputs * 4
        ).ToArray(),
        // Outputs
        model.AddOutputNeurons(
            agentProxy.nbOfOutputs,
            ActivationFunctions.Sigmoid
        ).ToArray(),
    };
    model.ConnectLayers(layers);

    // Recurrent output -> output connections.
    var outputs = layers.Last();
    model.ConnectLayers(new[] { outputs, outputs });

    return model;
}
/// <summary>
/// Creates an empty model: no neurons or synapses yet, a fresh synapse
/// innovation-number tracker, and a default weight initializer that draws
/// from GARandomManager.NextFloat(-1f, 1f).
/// </summary>
public NeuralModelBase()
{
    synapseInnovNbTracker = new SynapseInnovNbTracker();

    Neurons = new Dictionary<InnovationNumber, Neuron>();
    Synapses = new Dictionary<Synapse, WeightInitializer>();

    defaultWeightInitializer = () => GARandomManager.NextFloat(-1f, 1f);
}
/// <summary>
/// Prepares roulette-wheel selection WITHOUT replacement: pre-draws
/// <paramref name="totalNbToSelect"/> parents from the top candidates,
/// where each genome's chance is proportional to its shifted fitness.
/// </summary>
public override void Prepare(
    IList<IGenome> sampleGenomes,
    GenomeProductionSession thisSession,
    GenomeProductionSession totalSession,
    int totalNbToSelect)
{
    base.Prepare(sampleGenomes, thisSession, totalSession, totalNbToSelect);

    // IList exposes Count directly; no need for the LINQ Count() scan.
    var samplesCount = ComputeParticipantsCount(sampleGenomes.Count);
    var candidates = sampleGenomes.Take(samplesCount).ToList();

    // Shift all fitnesses so the lowest becomes non-negative; the epsilon
    // guarantees every genome keeps a non-zero slice of the wheel.
    var minFitness = candidates.Min(x => x.Fitness);
    var offset = minFitness < 0 ? -minFitness : 0f;

    var genomAndFitn = candidates.ToDictionary(
        x => x,
        x => x.Fitness + offset + float.Epsilon);
    var fitnessSum = genomAndFitn.Values.Sum();

    allParents = new Queue<IGenome>(totalNbToSelect);
    // NOTE(review): assumes totalNbToSelect <= candidates.Count; otherwise
    // the pool empties and the assert below fires.
    for (int i = 0; i < totalNbToSelect; i++)
    {
        // Roulette wheel: walk the slices until the random target lands
        // inside one.
        var targetFitness = GARandomManager.NextFloat(0, fitnessSum);
        IGenome target = null;
        foreach (var pair in genomAndFitn)
        {
            if (targetFitness <= pair.Value)
            {
                target = pair.Key;
                break;
            }
            targetFitness -= pair.Value;
        }

        Debug.Assert(target != null);
        allParents.Enqueue(target);

        // Selection without replacement: shrink the wheel.
        fitnessSum -= genomAndFitn[target];
        genomAndFitn.Remove(target);
    }
}
/// <summary>
/// Applies the configured mutation entries to the genome. "Required"
/// entries always run; "Independent" entries each roll their own chance;
/// "Dependent" entries share one roll that is consumed as it walks down
/// the entry list.
/// </summary>
public void ApplyMutations(IGenome genome)
{
    // Per-category master switches: a category whose chance is below ~1.0
    // is itself rolled once; otherwise it is always on.
    var dependentOn = true;
    var independentOn = true;

    var dependentRoll = GARandomManager.NextFloat();

    if (DependentMutationsChance <= 0.999f)
    {
        dependentOn = GARandomManager.NextFloat() < DependentMutationsChance;
    }
    if (IndependentMutationsChance <= 0.999f)
    {
        independentOn = GARandomManager.NextFloat() < IndependentMutationsChance;
    }

    foreach (var entry in MutationEntries)
    {
        if (entry.mutationType == EMutationType.Required)
        {
            entry.mutation.Mutate(genome);
        }
        else if (entry.mutationType == EMutationType.Independent)
        {
            // Short-circuit keeps the RNG untouched when the category is off.
            if (independentOn && GARandomManager.NextFloat() <= entry.chance)
            {
                entry.mutation.Mutate(genome);
            }
        }
        else if (entry.mutationType == EMutationType.Dependent)
        {
            // When the category is off, the shared roll is NOT consumed
            // (matches the original switch/break behaviour).
            if (!dependentOn)
            {
                continue;
            }
            if (dependentRoll <= entry.chance)
            {
                entry.mutation.Mutate(genome);
            }
            dependentRoll -= entry.chance;
        }
    }
}
/// <summary>
/// Builds a small model (1 input -> 1 Gaussian hidden -> 1 Sigmoid output)
/// augmented with an LSTM cell between the input and the hidden neuron;
/// a bias neuron feeds every non-input layer and the LSTM.
/// </summary>
private INeuralModel InitModel()
{
    var model = new NeuralModelBase();
    model.defaultWeightInitializer = () => GARandomManager.NextFloat(-1, 1);
    model.WeightConstraints = new Tuple<float, float>(-5, 5);

    var bias = model.AddBiasNeuron();
    var layers = new[]
    {
        model.AddInputNeurons(1).ToArray(),
        model.AddNeurons(
            sampleNeuron: new Neuron(-1, ActivationFunctions.Gaussian),
            count: 1
        ).ToArray(),
        model.AddOutputNeurons(1, ActivationFunctions.Sigmoid).ToArray()
    };
    model.ConnectBias(bias, layers.Skip(1));
    model.ConnectLayers(layers);

    // Fixed: removed a leftover debugging statement that built and discarded
    // a throwaway LINQ query, plus stale commented-out RNN wiring.

    // LSTM path: input -> lstmIn ... lstmOut -> hidden neuron.
    var input = layers.First().First();
    var output = layers[1].First();
    model.AddLSTM(out var lstmIn, out var lstmOut, biasNeuron: bias);
    model.AddConnection(input, lstmIn);
    model.AddConnection(lstmOut, output);

    return model;
}
/// <summary>
/// Builds the 2-input cart-pole model: inputs connected both directly and
/// through an LSTM cell to a single TanH output neuron.
/// </summary>
private INeuralModel Init2InputsNeuralModel()
{
    var model = new NeuralModelBase();
    // Fixed: removed a stray empty statement (";;") after this assignment,
    // an unused local, and stale commented-out RNN experiments.
    model.defaultWeightInitializer = () => GARandomManager.NextFloat(-1, 1);
    model.WeightConstraints = new Tuple<float, float>(-5f, 5f);

    var bias = model.AddBiasNeuron();
    var layers = new List<Neuron[]>()
    {
        model.AddInputNeurons(CartPoleAgent.nbOfInputs).ToArray(),
        model.AddOutputNeurons(1, ActivationFunctions.TanH).ToArray(),
    };
    model.ConnectLayers(layers);

    // LSTM path: inputs -> lstmIn ... lstmOut -> output layer.
    // ToArray() kept in case ConnectNeurons is lazily evaluated — TODO confirm.
    Neuron lstmIn, lstmOut;
    model.AddLSTM(out lstmIn, out lstmOut, biasNeuron: bias);
    model.ConnectNeurons(layers[0], new[] { lstmIn }).ToArray();
    model.ConnectNeurons(new[] { lstmOut }, layers.Last()).ToArray();

    return model;
}
/// <summary>
/// Physics tick for the population: runs the base update and, once every
/// randomForceInterval seconds, shoves every active agent's ball with a
/// random horizontal force.
/// </summary>
private void FixedUpdate()
{
    OnFixedUpdate();

    if (Time.time <= lastTimeRndForceApplied + randomForceInterval)
    {
        return;
    }

    foreach (var agent in agents)
    {
        if (!agent.gameObject.activeSelf)
        {
            continue;
        }

        // Fixed: pattern match instead of a bare "as" cast — a
        // non-PlatformAgent entry is now skipped instead of throwing a
        // NullReferenceException.
        if (agent is PlatformAgent platformAgent)
        {
            platformAgent.ball.AddForce(
                GARandomManager.NextFloat(-randomForce, randomForce),
                0,
                0
            );
        }
    }
    lastTimeRndForceApplied = Time.time;
}
/// <summary>
/// Builds a 2-1-1 model (two inputs, one Gaussian hidden neuron, one
/// Sigmoid output) with a bias neuron wired to every non-input layer.
/// </summary>
private INeuralModel InitModel()
{
    var model = new NeuralModelBase();
    model.defaultWeightInitializer = () => GARandomManager.NextFloat(-3, 3);
    model.WeightConstraints = new Tuple<float, float>(-20, 20);

    var biasNeuron = model.AddBiasNeuron();

    var inputLayer = model.AddInputNeurons(2).ToArray();
    var hiddenLayer = model.AddNeurons(
        sampleNeuron: new Neuron(-1, ActivationFunctions.Gaussian),
        count: 1
    ).ToArray();
    var outputLayer = model.AddOutputNeurons(1, ActivationFunctions.Sigmoid).ToArray();

    var topology = new[] { inputLayer, hiddenLayer, outputLayer };
    model.ConnectBias(biasNeuron, topology.Skip(1));
    model.ConnectLayers(topology);

    return model;
}
/// <summary>
/// Picks a random position inside the padded unit square, retrying up to
/// <paramref name="tries"/> times to find a spot at least distBetweenNodes
/// away from every already-placed neuron. If every try collides, the last
/// candidate is returned anyway.
/// </summary>
private static Vector2 GetRandomPos(int tries = 3)
{
    var result = new Vector2(0, 0);
    for (int i = 0; i < tries; i++)
    {
        result.X = GARandomManager.NextFloat(
            xPadding + distBetweenNodes,
            1f - (xPadding + distBetweenNodes));
        result.Y = GARandomManager.NextFloat(
            yPadding + distBetweenNodes,
            1f - (yPadding + distBetweenNodes));

        // Any() short-circuits on the first neighbour that is too close,
        // unlike the original Count(...) == 0 which scanned every neuron.
        var tooClose = NeuronPos.Values.Any(
            pos => Vector2.Distance(pos, result) < distBetweenNodes);
        if (!tooClose)
        {
            break;
        }
    }
    return result;
}
/// <summary>
/// Try few times to find an unused pair (unless avoidPairRepetition
/// is false).
/// Roulette-wheel selection without replacement: draws nbToSelect genomes
/// with probability proportional to fitness, then (optionally) retries the
/// whole draw until the selected set has not been produced before.
/// </summary>
protected override IEnumerable <IGenome> PerformSelection(int nbToSelect)
{
    var result = new IGenome[nbToSelect];
    for (int tries = 0; tries < nbOfTriesToAvoidRepetition; tries++)
    {
        // Remove the best genomes depending on the number of tries.
        // (Shrinking the pool makes a previously-unseen set more likely.)
        if (tries != 0 && tries % removeBestIfExceedsTriesCap == 0)
        {
            var best = genomeAndFitn.MaxBy(x => x.Value).Key;
            genomeAndFitn.Remove(best);
        }

        // Use a copy of the dictionary so that the sets don't have
        // repeating genomes.
        var genomeAndFitnCpy = genomeAndFitn.ToDictionary(
            x => x.Key,
            x => x.Value);
        var fitness = genomeAndFitnCpy.Sum(x => x.Value);

        for (int i = 0; i < nbToSelect; i++)
        {
            // Roulette wheel: walk the entries subtracting each slice
            // until the random target falls inside one.
            var targetFitness = GARandomManager.NextFloat(0, fitness);
            IGenome target = null;
            foreach (var pair in genomeAndFitnCpy)
            {
                if (targetFitness <= pair.Value)
                {
                    target = pair.Key;
                    break;
                }
                else
                {
                    targetFitness -= pair.Value;
                }
            }
            Debug.Assert(target != null);
            // Without replacement within one selected set: remove the
            // winner and shrink the wheel before the next draw.
            fitness -= genomeAndFitnCpy[target];
            genomeAndFitnCpy.Remove(target);
            result[i] = target;
        }

        if (!avoidPairRepetition)
        {
            return(result);
        }

        // Order the set, to make it work for cases like:
        // (1, 5, 2) and (2, 1, 5).
        result = result.OrderBy(x => x.Fitness).ToArray();
        if (!SetOfGenomesAlreadyUsed(result))
        {
            usedSetsOfGenomes.Add(result);
            return(result);
        }
    }
    throw new Exception("Too many tries for a selection.");
}
/// <summary>
/// Builds the agent model: inputs connected directly to the Sigmoid output
/// layer and, in parallel, through an LSTM cell; a bias neuron feeds every
/// non-input layer and the LSTM.
/// </summary>
protected override INeuralModel InitNeuralModel()
{
    var model = new NeuralModelBase();
    model.defaultWeightInitializer = () => GARandomManager.NextFloat(-1, 1);
    model.WeightConstraints = new Tuple<float, float>(
        weightConstraints.x,
        weightConstraints.y
    );

    var bias = model.AddBiasNeuron();

    // GetComponent is comparatively expensive; resolve the proxy once
    // instead of once per layer. (Stale commented-out memory-neuron
    // experiments were removed.)
    var agentProxy = agentPrefab.GetComponent<AgentProxy>();

    var layers = new List<Neuron[]>()
    {
        // Inputs
        model.AddInputNeurons(agentProxy.nbOfInputs).ToArray(),
        // Outputs
        model.AddOutputNeurons(
            agentProxy.nbOfOutputs,
            ActivationFunctions.Sigmoid
        ).ToArray(),
    };
    model.ConnectLayers(layers);
    model.ConnectBias(bias, layers.Skip(1));

    // LSTM path: inputs -> lstmIn ... lstmOut -> outputs.
    // ToArray() kept in case ConnectNeurons is lazily evaluated — TODO confirm.
    Neuron lstmIn, lstmOut;
    model.AddLSTM(out lstmIn, out lstmOut, biasNeuron: bias);
    model.ConnectNeurons(layers[0], new[] { lstmIn }).ToArray();
    model.ConnectNeurons(new[] { lstmOut }, layers.Last()).ToArray();

    return model;
}