/// <summary>
/// Verify that a round-tripped network matches the structure built by Create():
/// 2 inputs, 1 output, depth 3, sigmoid hidden / step output activation,
/// and the expected link fan-in/fan-out on input and output neurons.
/// </summary>
private void Validate(NEATNetwork network)
{
    // basic topology
    Assert.AreEqual(1, network.OutputCount);
    Assert.AreEqual(2, network.InputCount);
    Assert.AreEqual(3, network.NetworkDepth);

    // activation functions survived persistence
    Assert.IsTrue(network.ActivationFunction is ActivationSigmoid);
    Assert.IsTrue(network.OutputActivationFunction is ActivationStep);
    var step = (ActivationStep)network.OutputActivationFunction;
    Assert.AreEqual(0.0, step.Center);
    Assert.AreEqual(1.0, step.High);
    Assert.AreEqual(0.0, step.Low);

    Assert.AreEqual(5, network.Neurons.Count);

    // index one neuron per type; the 5 neurons collapse to 4 distinct types
    IDictionary<NEATNeuronType, NEATNeuron> byType = new Dictionary<NEATNeuronType, NEATNeuron>();
    foreach (NEATNeuron n in network.Neurons)
    {
        byType[n.NeuronType] = n;
    }
    Assert.AreEqual(4, byType.Count);

    NEATNeuron outputNeuron = byType[NEATNeuronType.Output];
    NEATNeuron inputNeuron = byType[NEATNeuronType.Input];

    // inputs only feed forward; the output only receives
    Assert.AreEqual(1, inputNeuron.OutputboundLinks.Count);
    Assert.AreEqual(0, inputNeuron.InboundLinks.Count);
    Assert.AreEqual(0, outputNeuron.OutputboundLinks.Count);
    Assert.AreEqual(1, outputNeuron.InboundLinks.Count);
}
/// <summary>
/// ECJ evaluation hook: decode the individual into a NEAT network and score
/// it on the cart-pole task.  The number of balanced steps is the fitness;
/// surviving MAX_STEPS counts as ideal.
/// </summary>
public void Evaluate(IEvolutionState state, Individual ind, int subpop, int threadnum)
{
    // already scored on an earlier pass — nothing to do
    if (ind.Evaluated)
    {
        return;
    }

    if (!(ind is NEATIndividual))
    {
        state.Output.Fatal("Whoa! It's not a NEATIndividual!!!", null);
    }
    var individual = (NEATIndividual)ind;

    if (!(individual.Fitness is SimpleFitness))
    {
        state.Output.Fatal("Whoa! It's not a SimpleFitness!!!", null);
    }

    // run the simulation; the step count is the fitness
    NEATNetwork network = individual.CreateNetwork();
    double steps = RunCartPole(network, state);

    ((SimpleFitness)individual.Fitness).SetFitness(state, steps, steps >= MAX_STEPS);
    individual.Evaluated = true;
}
/// <summary>
/// Entry point: evolve a NEAT network that solves XOR.
/// A NEAT network starts with only an input layer and an output layer;
/// the hidden structure is evolved as training progresses.  Connections
/// may be feedforward, recurrent, or self-connected — NEAT tries all of
/// these while evolving a network capable of the given task.
/// </summary>
static void Main(string[] args)
{
    IMLDataSet xorData = new BasicMLDataSet(XORInput, XORIdeal);

    // 2 inputs, 1 output, population of 1000 genomes
    NEATPopulation population = new NEATPopulation(2, 1, 1000);
    population.Reset();
    population.InitialConnectionDensity = 1.0; // not required, but speeds processing.

    ICalculateScore scorer = new TrainingSetScore(xorData);

    // evolve until the error drops below 1%
    TrainEA trainer = NEATUtil.ConstructNEATTrainer(population, scorer);
    EncogUtility.TrainToError(trainer, 0.01);

    NEATNetwork best = (NEATNetwork)trainer.CODEC.Decode(trainer.BestGenome);
    // TODO no persistance? no means to peek structure?

    // show how the evolved network performs on the training set
    Console.WriteLine(@"Neural Network Results:");
    EncogUtility.Evaluate(best, xorData);
}
/// <summary>
/// Instantiate the creature prefab at the given position with a random
/// z-rotation, register it, and wire it up with a NEAT brain and colours.
/// </summary>
public static void SpawnCreature(Vector2 pos, NEATNetwork net, Color[] cs)
{
    Quaternion facing = Quaternion.Euler(0, 0, Random.Range(0, 360));
    GameObject spawned = Instantiate(instance.creature, pos, facing);

    Creature creature = spawned.GetComponent<Creature>();
    creatures.Add(creature);
    creature.InitCreature(net, cs);
}
/// <summary>
/// Configure this creature to be driven by the given NEAT network.
/// </summary>
public void InitCreature(NEATNetwork neatNet, Color[] c)
{
    neat = true;
    colors = c;

    // cache renderers: body sprite on this object, "sound" sprite on child 0
    sr = GetComponent<SpriteRenderer>();
    sound = transform.GetChild(0).GetComponent<SpriteRenderer>();

    this.neatNet = neatNet;
}
/// <summary>
/// Round-trip the fixture network through binary serialization and
/// verify the restored structure.
/// </summary>
public void TestPersistSerial()
{
    NEATNetwork original = Create();
    SerializeObject.Save(SERIAL_FILENAME.ToString(), original);

    var restored = (NEATNetwork)SerializeObject.Load(SERIAL_FILENAME.ToString());
    Validate(restored);
}
/// <summary>
/// Round-trip the fixture network through Encog EG persistence and
/// verify the restored structure.
/// </summary>
public void TestPersistEG()
{
    NEATNetwork original = Create();
    EncogDirectoryPersistence.SaveObject(EG_FILENAME, original);

    var restored = (NEATNetwork)EncogDirectoryPersistence.LoadObject(EG_FILENAME);
    Validate(restored);
}
/// <summary>
/// Convert the genes to an actual network.
/// </summary>
/// <exception cref="InvalidOperationException">
/// If an enabled link gene references a neuron id that has no
/// corresponding neuron gene.
/// </exception>
public override void Decode()
{
    var pop = (NEATPopulation)Population;
    IList<NEATNeuron> neurons = new List<NEATNeuron>();

    // first, materialize every neuron gene as a neuron
    foreach (IGene gene in Neurons.Genes)
    {
        var neuronGene = (NEATNeuronGene)gene;
        var neuron = new NEATNeuron(
            neuronGene.NeuronType,
            neuronGene.Id,
            neuronGene.SplitY,
            neuronGene.SplitX,
            neuronGene.ActivationResponse);
        neurons.Add(neuron);
    }

    // now to create the links
    foreach (IGene gene_0 in Links.Genes)
    {
        var linkGene = (NEATLinkGene)gene_0;
        if (!linkGene.Enabled)
        {
            continue;
        }

        // Previously an unknown to-neuron id was "handled" by printing a
        // leftover debug string ("test") and then crashing on neurons[-1],
        // and an unknown from-neuron id was not checked at all.
        // Fail fast with a diagnostic in both cases instead.
        int fromElement = GetElementPos(linkGene.FromNeuronID);
        if (fromElement == -1)
        {
            throw new InvalidOperationException(
                "Link gene references unknown source neuron id " + linkGene.FromNeuronID);
        }
        NEATNeuron fromNeuron = neurons[fromElement];

        int toElement = GetElementPos(linkGene.ToNeuronID);
        if (toElement == -1)
        {
            throw new InvalidOperationException(
                "Link gene references unknown target neuron id " + linkGene.ToNeuronID);
        }
        NEATNeuron toNeuron = neurons[toElement];

        var link = new NEATLink(linkGene.Weight, fromNeuron, toNeuron, linkGene.Recurrent);
        fromNeuron.OutputboundLinks.Add(link);
        toNeuron.InboundLinks.Add(link);
    }

    var network = new NEATNetwork(inputCount, outputCount, neurons,
                                  pop.NeatActivationFunction,
                                  pop.OutputActivationFunction, 0);
    network.Snapshot = pop.Snapshot;
    Organism = network;
}
/// <summary>
/// Run the cart-pole balancing simulation with the given network as the
/// controller and return how many steps the pole stayed balanced
/// (capped at MAX_STEPS).
/// </summary>
public int RunCartPole(NEATNetwork net, IEvolutionState state)
{
    const double twelveDegrees = 0.2094384; // failure angle, in radians

    // reset the cart/pole state
    _x = _xDot = _theta = _thetaDot = 0.0;
    _steps = 0;

    double[][] sensors = TensorFactory.Create<double>(1, 5);

    while (_steps++ < MAX_STEPS)
    {
        // normalize the four state variables into roughly [0,1]; slot 0 is the bias
        sensors[0][0] = 1.0;
        sensors[0][1] = (_x + 2.4) / 4.8;
        sensors[0][2] = (_xDot + .75) / 1.5;
        sensors[0][3] = (_theta + twelveDegrees) / .41;
        sensors[0][4] = (_thetaDot + 1.0) / 2.0;

        double[] outputs = GetNetOutput(net, sensors, state);

        // push the cart toward whichever output unit fired harder
        _y = outputs[0] > outputs[1] ? 0 : 1;

        // advance the physics one tick
        cart_pole(_y);

        // failure: cart ran off the track or pole fell past 12 degrees
        bool failed = _x < -2.4 || _x > 2.4
                      || _theta < -twelveDegrees || _theta > twelveDegrees;
        if (failed)
        {
            return _steps;
        }
    }

    return _steps;
}
/// <summary>
/// Console menu loop: train or test the feed-forward and NEAT networks
/// until the user chooses to quit.
/// </summary>
static void Main(string[] args)
{
    Pokedex pokedex = new Pokedex();
    BattleHandler battles = new BattleHandler();
    FeedForwardNetwork ffNet = new FeedForwardNetwork(pokedex);
    NEATNetwork neatNet = new NEATNetwork(pokedex);

    bool done = false;
    while (!done)
    {
        Console.Write("\nWhat do you want to do, select and confirm with enter:\n\n[1] Train FeedForward\n[2] Train NEAT\n[3] Test FeedForward\n[4] Test NEAT\n[5] Quit");

        string choice = Console.ReadLine();
        switch (choice)
        {
            case "1":
                ffNet.train();
                break;
            case "2":
                neatNet.train();
                break;
            case "3":
                ffNet.test();
                break;
            case "4":
                neatNet.test();
                break;
            case "5":
                done = true;
                break;
            default:
                Console.Write("\n\nNot a valid option...");
                break;
        }

        Console.Write("\nPress any key to continue...");
        Console.ReadKey();
        Console.Clear();
    }
}
/// <summary>
/// Builds and trains a NEAT network.
/// </summary>
/// <param name="aset">The IMLDataSet to train against.</param>
/// <param name="inputcounts">The number of input neurons.</param>
/// <param name="outputcounts">The number of output neurons.</param>
/// <param name="populationsize">The population size for the evolution.</param>
/// <param name="ToErrorTraining">The error rate you want to train to.</param>
/// <returns>A trained NEAT network.</returns>
public static NEATNetwork BuildTrainNeatNetwork(IMLDataSet aset, int inputcounts, int outputcounts, int populationsize, double ToErrorTraining)
{
    // NOTE(review): unlike the XOR examples elsewhere, pop.Reset() is never
    // called here — confirm NEATTraining performs the initial population
    // reset itself.
    NEATPopulation pop = new NEATPopulation(inputcounts, outputcounts, populationsize);
    ICalculateScore score = new TrainingSetScore(aset);
    // train the neural network
    // step activation on the output layer so the network emits hard
    // decisions; threshold is moved from 0.0 to 0.5
    ActivationStep step = new ActivationStep();
    step.Center = 0.5;
    pop.OutputActivationFunction = step;
    NEATTraining train = new NEATTraining(score, pop);
    EncogUtility.TrainToError(train, ToErrorTraining);
    NEATNetwork network = (NEATNetwork)train.Method;
    return(network);
}
/// <summary>
/// Feed one input pattern (row 0 of <paramref name="input"/>) through the
/// network, relaxing it for its full depth, and return the output vector.
/// </summary>
public double[] GetNetOutput(NEATNetwork net, double[][] input, IEvolutionState state)
{
    int depth = net.MaxDepth();

    net.LoadSensors(input[0]);

    // activate once per level so signals propagate the whole depth
    for (int level = 0; level < depth; level++)
    {
        net.Activate(state);
    }

    double[] result = net.GetOutputResults();

    // clear residual activation before the next pattern
    net.Flush();
    return result;
}
/// <summary>
/// Program entry point: evolve a NEAT network on the XOR task and
/// report its results.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    IMLDataSet trainingData = new BasicMLDataSet(XORInput, XORIdeal);

    // population of 1000 genomes, 2 inputs, 1 output
    NEATPopulation population = new NEATPopulation(2, 1, 1000);
    population.Reset();
    population.InitialConnectionDensity = 1.0; // not required, but speeds processing.

    ICalculateScore fitness = new TrainingSetScore(trainingData);

    // evolve the population until the error is below 1%
    TrainEA evolution = NEATUtil.ConstructNEATTrainer(population, fitness);
    EncogUtility.TrainToError(evolution, 0.01);

    // decode the champion genome into a runnable network
    NEATNetwork champion = (NEATNetwork)evolution.CODEC.Decode(evolution.BestGenome);

    // test the neural network
    Console.WriteLine(@"Neural Network Results:");
    EncogUtility.Evaluate(champion, trainingData);
}
/// <summary>
/// Decode and score every genome, sort the population, and remember the
/// best network seen across all generations so far.
/// </summary>
public void SortAndRecord()
{
    // score each genome against the current problem
    foreach (IGenome g in Population.Genomes)
    {
        g.Decode();
        PerformCalculateScore(g);
    }

    Population.Sort();

    // track the best-ever organism, not just this generation's best
    IGenome best = Population.Best;
    double generationBest = best.Score;
    if (Comparator.IsBetterThan(generationBest, bestEverScore))
    {
        bestEverScore = generationBest;
        bestEverNetwork = (NEATNetwork)best.Organism;
    }

    bestEverScore = Comparator.BestScore(Error, bestEverScore);
}
/// <summary>
/// HyperNEAT decode: query the CPPN encoded by <paramref name="genome"/>
/// at every substrate link to produce the phenotype network's weights.
/// </summary>
/// <param name="pop">The population (passed through to NEAT decoding).</param>
/// <param name="substrate">The substrate whose links/nodes are queried.</param>
/// <param name="genome">The genome encoding the CPPN.</param>
/// <returns>The decoded NEAT network, or null if no link cleared the
/// expression threshold.</returns>
public IMLMethod Decode(NEATPopulation pop, Substrate.Substrate substrate, IGenome genome)
{
    // obtain the CPPN
    NEATCODEC neatCodec = new NEATCODEC();
    NEATNetwork cppn = (NEATNetwork)neatCodec.Decode(genome);

    List<NEATLink> linkList = new List<NEATLink>();

    IActivationFunction[] afs = new IActivationFunction[substrate.NodeCount];

    IActivationFunction af = new ActivationSteepenedSigmoid();
    // all activation functions are the same
    for (int i = 0; i < afs.Length; i++)
    {
        afs[i] = af;
    }

    // scale factor: CPPN outputs whose magnitude clears MinWeight are
    // rescaled by this before becoming link weights (see below)
    double c = this.MaxWeight / (1.0 - this.MinWeight);

    BasicMLData input = new BasicMLData(cppn.InputCount);

    // First create all of the non-bias links.
    foreach (SubstrateLink link in substrate.Links)
    {
        SubstrateNode source = link.Source;
        SubstrateNode target = link.Target;

        // CPPN input = source coordinates followed by target coordinates
        int index = 0;
        foreach (double d in source.Location)
        {
            input.Data[index++] = d;
        }
        foreach (double d in target.Location)
        {
            input.Data[index++] = d;
        }

        IMLData output = cppn.Compute(input);

        double weight = output[0];
        // only express the link if the CPPN output magnitude clears the threshold
        if (Math.Abs(weight) > this.MinWeight)
        {
            weight = (Math.Abs(weight) - this.MinWeight) * c * Math.Sign(weight);
            linkList.Add(new NEATLink(source.ID, target.ID, weight));
        }
    }

    // now create biased links
    input.Clear();
    int d2 = substrate.Dimensions;
    IList<SubstrateNode> biasedNodes = substrate.GetBiasedNodes();
    foreach (SubstrateNode target in biasedNodes)
    {
        // NOTE(review): the first d2 inputs stay zero (cleared above) —
        // presumably the bias "source" sits at the origin; only the target's
        // coordinates are written into the second half of the input
        for (int i = 0; i < d2; i++)
        {
            input.Data[d2 + i] = target.Location[i];
        }

        IMLData output = cppn.Compute(input);

        // the bias weight comes from the CPPN's second output
        double biasWeight = output[1];
        if (Math.Abs(biasWeight) > this.MinWeight)
        {
            biasWeight = (Math.Abs(biasWeight) - this.MinWeight) * c * Math.Sign(biasWeight);
            // bias links originate from node id 0
            linkList.Add(new NEATLink(0, target.ID, biasWeight));
        }
    }

    // check for invalid neural network
    if (linkList.Count == 0)
    {
        return(null);
    }

    linkList.Sort();

    NEATNetwork network = new NEATNetwork(substrate.InputCount,
                                          substrate.OutputCount,
                                          linkList,
                                          afs);

    network.ActivationCycles = substrate.ActivationCycles;
    return(network);
}
/// <summary>
/// Convert the genes to an actual network.
/// </summary>
/// <exception cref="InvalidOperationException">
/// If an enabled link gene references a neuron id that has no
/// corresponding neuron gene.
/// </exception>
public override void Decode()
{
    var pop = (NEATPopulation) Population;
    IList<NEATNeuron> neurons = new List<NEATNeuron>();

    // first, materialize every neuron gene as a neuron
    foreach (IGene gene in Neurons.Genes)
    {
        var neuronGene = (NEATNeuronGene) gene;
        var neuron = new NEATNeuron(
            neuronGene.NeuronType,
            neuronGene.Id,
            neuronGene.SplitY,
            neuronGene.SplitX,
            neuronGene.ActivationResponse);
        neurons.Add(neuron);
    }

    // now to create the links
    foreach (IGene gene_0 in Links.Genes)
    {
        var linkGene = (NEATLinkGene) gene_0;
        if (!linkGene.Enabled)
        {
            continue;
        }

        // Previously an unknown to-neuron id was "handled" by printing a
        // leftover debug string ("test") and then crashing on neurons[-1],
        // and an unknown from-neuron id was not checked at all.
        // Fail fast with a diagnostic in both cases instead.
        int fromElement = GetElementPos(linkGene.FromNeuronID);
        if (fromElement == -1)
        {
            throw new InvalidOperationException(
                "Link gene references unknown source neuron id " + linkGene.FromNeuronID);
        }
        NEATNeuron fromNeuron = neurons[fromElement];

        int toElement = GetElementPos(linkGene.ToNeuronID);
        if (toElement == -1)
        {
            throw new InvalidOperationException(
                "Link gene references unknown target neuron id " + linkGene.ToNeuronID);
        }
        NEATNeuron toNeuron = neurons[toElement];

        var link = new NEATLink(linkGene.Weight, fromNeuron, toNeuron, linkGene.Recurrent);
        fromNeuron.OutputboundLinks.Add(link);
        toNeuron.InboundLinks.Add(link);
    }

    var network = new NEATNetwork(inputCount, outputCount, neurons,
                                  pop.NeatActivationFunction,
                                  pop.OutputActivationFunction, 0);
    network.Snapshot = pop.Snapshot;
    Organism = network;
}
/// <summary>
/// Score a NEAT individual on the XOR task.  Fitness is
/// (4 - total absolute error)^2; the individual is flagged ideal when
/// every output lands within 0.5 of its target.
/// </summary>
public virtual void Evaluate(IEvolutionState state, Individual ind, int subpopulation, int threadnum)
{
    if (ind.Evaluated)
    {
        return;
    }

    if (!(ind is NEATIndividual))
    {
        state.Output.Fatal("Whoa! It's not a NEATIndividual!!!", null);
    }
    NEATIndividual neatInd = (NEATIndividual)ind;

    if (!(neatInd.Fitness is SimpleFitness))
    {
        state.Output.Fatal("Whoa! It's not a SimpleFitness!!!", null);
    }

    // The four possible input combinations to xor.
    // The first number in each row is the bias.
    double[][] cases =
    {
        new[] { 1.0, 0.0, 0.0 }, // expects 0
        new[] { 1.0, 0.0, 1.0 }, // expects 1
        new[] { 1.0, 1.0, 0.0 }, // expects 1
        new[] { 1.0, 1.0, 1.0 }, // expects 0
    };
    double[] expected = { 0.0, 1.0, 1.0, 0.0 };
    double[] actual = new double[4];

    NEATNetwork net = neatInd.CreateNetwork();
    int depth = net.MaxDepth();

    // Load and activate the network on each input pattern.
    for (int i = 0; i < cases.Length; i++)
    {
        net.LoadSensors(cases[i]);
        for (int relax = 0; relax < depth; relax++)
        {
            net.Activate(state);
        }
        // single output unit
        actual[i] = net.GetOutputResults()[0];
        net.Flush();
    }

    // calculate fitness: squared "closeness" over all four cases
    double errorSum = 0;
    for (int i = 0; i < actual.Length; i++)
    {
        errorSum += Math.Abs(actual[i] - expected[i]);
    }
    double fitness = (4.0 - errorSum) * (4.0 - errorSum);

    // this is from the original code for counting as ideal:
    // every case must be on the right side of 0.5
    bool ideal = true;
    for (int i = 0; i < actual.Length; i++)
    {
        if (Math.Abs(actual[i] - expected[i]) > 0.5)
        {
            ideal = false;
            break;
        }
    }

    ((SimpleFitness)neatInd.Fitness).SetFitness(state, fitness, ideal);
    neatInd.Evaluated = true;
}
/// <summary>
/// Build the fixed persistence-test fixture: a NEAT network with
/// 2 inputs, a bias, 1 hidden neuron, and 1 output, all funneled
/// through the hidden neuron.
/// </summary>
private NEATNetwork Create()
{
    IActivationFunction sigmoid = new ActivationSigmoid();
    IActivationFunction stepFn = new ActivationStep();

    // create the neurons (type, id, splitY, splitX, activationResponse)
    NEATNeuron in1 = new NEATNeuron(NEATNeuronType.Input, 1, 0.1, 0.2, 0.3);
    NEATNeuron in2 = new NEATNeuron(NEATNeuronType.Input, 2, 0.1, 0.2, 0.3);
    NEATNeuron biasNeuron = new NEATNeuron(NEATNeuronType.Bias, 3, 0.1, 0.2, 0.3);
    NEATNeuron hidden = new NEATNeuron(NEATNeuronType.Hidden, 4, 0.1, 0.2, 0.3);
    NEATNeuron outNeuron = new NEATNeuron(NEATNeuronType.Output, 5, 0.1, 0.2, 0.3);

    // register them (same order as the original fixture)
    IList<NEATNeuron> neurons = new List<NEATNeuron>
    {
        in1,
        in2,
        hidden,
        biasNeuron,
        outNeuron,
    };

    // connect everything through the single hidden neuron
    Link(0.01, in1, hidden, false);
    Link(0.01, in2, hidden, false);
    Link(0.01, biasNeuron, hidden, false);
    Link(0.01, hidden, outNeuron, false);

    // create the network: 2 inputs, 1 output, depth 3
    return new NEATNetwork(2, 1, neurons, sigmoid, stepFn, 3);
}
/// <summary>
/// Sort the genomes after decoding and scoring each one, updating the
/// best-ever score and network whenever this generation improves on it.
/// </summary>
public void SortAndRecord()
{
    foreach (IGenome candidate in Population.Genomes)
    {
        candidate.Decode();
        PerformCalculateScore(candidate);
    }
    Population.Sort();

    // compare this generation's champion against the all-time best
    IGenome champion = Population.Best;
    double championScore = champion.Score;
    if (Comparator.IsBetterThan(championScore, bestEverScore))
    {
        bestEverScore = championScore;
        bestEverNetwork = ((NEATNetwork) champion.Organism);
    }
    bestEverScore = Comparator.BestScore(Error, bestEverScore);
}
/// <summary>
/// Decode the current best genome into a phenotype via HyperNEAT, run one
/// trial, and paint the "boxes" grid: test-pattern cells in blue, the rest
/// shaded by network activation, and the network's chosen cell in red.
/// </summary>
public void Render()
{
    NEATGenome genome = (NEATGenome)this.pop.BestGenome;
    Substrate substrate = SubstrateFactory.factorSandwichSubstrate(resolution, resolution);
    HyperNEATCODEC codec = new HyperNEATCODEC();
    NEATNetwork phenotype = (NEATNetwork)codec.Decode(this.pop, substrate, genome);

    TrialEvaluation trial = new TrialEvaluation(phenotype, this.testCase);
    IntPair actualPos = trial.Query(resolution);

    // clear what was there before
    GridCanvas.Children.Clear();

    // cell geometry in canvas space and substrate ([-1,1]) space
    double cellWidth = GridCanvas.ActualWidth / resolution;
    double cellHeight = GridCanvas.ActualHeight / resolution;
    double delta = 2.0 / resolution;

    int index = 0;
    for (int row = 0; row < resolution; row++)
    {
        double y = -1 + (row * delta);
        double top = row * cellHeight;
        for (int col = 0; col < resolution; col++)
        {
            double x = -1 + (col * delta);
            double left = col * cellWidth;

            Rectangle cell = new Rectangle();
            cell.SetValue(Canvas.LeftProperty, left);
            cell.SetValue(Canvas.TopProperty, top);
            cell.Width = cellWidth;
            cell.Height = cellHeight;

            if (this.testCase.GetPixel(x, y) > 0)
            {
                // cell is part of the test pattern
                cell.Fill = Brushes.Blue;
            }
            else
            {
                // shade by the network's output for this cell
                double activation = trial.Output[index];
                int level = trial.Normalize(activation, 255);
                cell.Fill = new SolidColorBrush(Color.FromRgb(255, (byte)level, 255));
                cell.Stroke = Brushes.Black;
            }
            GridCanvas.Children.Add(cell);
            index++; // index advances for every cell, matching trial.Output layout
        }
    }

    // highlight the cell the network actually picked
    Rectangle target = new Rectangle();
    target.SetValue(Canvas.LeftProperty, actualPos.X * cellWidth);
    target.SetValue(Canvas.TopProperty, actualPos.Y * cellHeight);
    target.Width = cellWidth;
    target.Height = cellHeight;
    target.Fill = Brushes.Red;
    GridCanvas.Children.Add(target);
}