//---------MAIN LOOP-------------------//
/// <summary>
/// Trains the MLP for up to <paramref name="iterations"/> epochs over the full
/// training set, stopping early once every sample's error is within tolerance.
/// Snapshots the starting weights into initialWeights/initialNodeWeights first
/// so they can be restored later (e.g. by resetMLP).
/// </summary>
/// <param name="iterations">Maximum number of training epochs.</param>
/// <param name="graph">Performance graph used for progress labels and metrics.</param>
/// <param name="useGraph">When true, per-sample rates and per-epoch MSE are pushed to the graph.</param>
/// <param name="errorTolerance">Per-sample absolute error below which a sample counts as learned. Defaults to the previous hard-coded 0.01.</param>
public void run(int iterations, PerfGraph graph, bool useGraph, double errorTolerance = 0.01)
{
    // Snapshot the pre-training weights so the network can be reset/inspected later.
    for (int i = 0; i < weights.Length; i++)
    {
        initialWeights[i] = weights[i];
    }
    for (int i = 0; i < nodes.Count; i++)
    {
        for (int j = 0; j < nodes[i].weights.Length; j++)
        {
            initialNodeWeights[i, j] = nodes[i].weights[j];
        }
    }

    for (int i = 0; i < iterations; i++)
    {
        bool done = true;
        graph.updateLabel_individual("MLP on epoch:" + i);

        for (int j = 0; j < inputs.Count; j++)
        {
            // Forward pass, error computation, then backpropagation for this sample.
            setNodeOutputs(j);
            setOutput(j);
            setOutputError(j);
            setNodeErrors(j);
            changeOutputWeights(j);
            changeInputWeights(j);

            if (useGraph)
            {
                graph.updateRates(inputs[j].expectedOutput, inputs[j].output);
            }

            // Any sample still outside tolerance forces another epoch.
            if (Math.Abs(inputs[j].error) > errorTolerance)
            {
                done = false;
            }
        }

        if (useGraph)
        {
            graph.updateMSE(meanSquaredError());
        }

        if (done)
        {
            break;
        }
    }
}
//---------MAIN LOOP-------------------//
/// <summary>
/// Runs backpropagation training for up to <paramref name="iterations"/> epochs.
/// Before training, the current weights are copied into initialWeights and
/// initialNodeWeights. Training stops early when every input's absolute error
/// is at most 0.01.
/// </summary>
/// <param name="iterations">Upper bound on training epochs.</param>
/// <param name="graph">Graph receiving progress labels and (optionally) metrics.</param>
/// <param name="useGraph">If true, rate and MSE updates are sent to the graph.</param>
public void run(int iterations, PerfGraph graph, bool useGraph)
{
    // Record the pre-training weights for later restoration/inspection.
    for (int w = 0; w < weights.Length; w++)
    {
        initialWeights[w] = weights[w];
    }
    for (int n = 0; n < nodes.Count; n++)
    {
        for (int w = 0; w < nodes[n].weights.Length; w++)
        {
            initialNodeWeights[n, w] = nodes[n].weights[w];
        }
    }

    for (int epoch = 0; epoch < iterations; epoch++)
    {
        graph.updateLabel_individual("MLP on epoch:" + epoch);

        bool allWithinTolerance = true;
        for (int sample = 0; sample < inputs.Count; sample++)
        {
            // Forward pass followed by backpropagation for this sample.
            setNodeOutputs(sample);
            setOutput(sample);
            setOutputError(sample);
            setNodeErrors(sample);
            changeOutputWeights(sample);
            changeInputWeights(sample);

            if (useGraph)
            {
                graph.updateRates(inputs[sample].expectedOutput, inputs[sample].output);
            }

            // Equivalent to the original "error > 0.01 => not done" check.
            allWithinTolerance &= Math.Abs(inputs[sample].error) <= 0.01;
        }

        if (useGraph)
        {
            graph.updateMSE(meanSquaredError());
        }

        if (allWithinTolerance)
        {
            break;
        }
    }
}
/// <summary>
/// Evolutionary training loop: each iteration trains every MLP in the population
/// for 50 epochs, scores it by mean squared error (lower = fitter), then — when
/// evolvePopSize &gt; 0 — removes the least-fit individuals and adds evolved deep
/// clones of the fittest. Stops after <c>iterations</c> iterations or when every
/// individual reports the same MSE.
/// </summary>
public void run()
{
    int iteration = 0;
    while (iteration < iterations)
    {
        graph.updateLabel_evolutionary("EA on iteration " + iteration);

        // Train each MLP briefly and record its fitness, then reset its weights.
        double[] fitness = new double[mlps.Count];
        int i = 0;
        foreach (MLP mlp in mlps)
        {
            graph.updateLabel_evolutionary("EA on iteration " + iteration + " training MLP:" + (i + 1));
            mlp.run(50, graph, useGraph);
            fitness[i] = mlp.meanSquaredError();
            mlp.resetMLP();
            i++;
        }

        // Converged when every individual reports the identical MSE.
        // NOTE(review): exact '!=' on doubles is fragile; an epsilon comparison
        // may be intended here — confirm before changing behavior.
        bool converged = true;
        // Was a 9999 sentinel, which misreported the minimum if every MSE exceeded it.
        double minSquaredError = double.MaxValue;
        foreach (double error in fitness)
        {
            if (error < minSquaredError)
            {
                minSquaredError = error;
            }
            if (error != fitness[0])
            {
                converged = false;
            }
        }

        graph.updateLabel_evolutionary("EA on iteration " + iteration + " min MSE:" + minSquaredError);
        if (converged)
        {
            graph.updateLabel_evolutionary("CONVERGED after :" + iteration + " iterations with MSE:" + minSquaredError);
            graph.updateLabel_individual("");
            break;
        }

        if (evolvePopSize > 0)
        {
            // Running selections over the population:
            //   evolvingMLP keeps the evolvePopSize fittest (lowest MSE),
            //   removingMLP keeps the evolvePopSize least fit (highest MSE).
            List<KeyValuePair<double, MLP>> evolvingMLP = new List<KeyValuePair<double, MLP>>();
            List<KeyValuePair<double, MLP>> removingMLP = new List<KeyValuePair<double, MLP>>();
            for (i = 0; i < evolvePopSize; i++)
            {
                evolvingMLP.Add(new KeyValuePair<double, MLP>(fitness[i], mlps[i]));
                removingMLP.Add(new KeyValuePair<double, MLP>(fitness[i], mlps[i]));
            }
            for (i = evolvePopSize; i < popSize; i++)
            {
                // Hoisted: the original evaluated MaxBy/MinBy twice per candidate.
                var worstOfBest = evolvingMLP.MaxBy(kvp => kvp.Key);
                var bestOfWorst = removingMLP.MinBy(kvp => kvp.Key);
                if (fitness[i] < worstOfBest.Key)
                {
                    evolvingMLP.Remove(worstOfBest);
                    evolvingMLP.Add(new KeyValuePair<double, MLP>(fitness[i], mlps[i]));
                }
                if (fitness[i] > bestOfWorst.Key)
                {
                    removingMLP.Remove(bestOfWorst);
                    removingMLP.Add(new KeyValuePair<double, MLP>(fitness[i], mlps[i]));
                }
            }

            // Remove the least-fit individuals from the population.
            List<MLP> temp = new List<MLP>();
            foreach (KeyValuePair<double, MLP> kvp in removingMLP)
            {
                temp.Add(kvp.Value);
            }
            foreach (MLP m in temp)
            {
                mlps.Remove(m);
            }

            // Evolve deep clones of the fittest and add them back to the population.
            temp.Clear();
            foreach (KeyValuePair<double, MLP> kvp in evolvingMLP)
            {
                temp.Add(DeepClone(kvp.Value));
            }
            List<MLP> evolved = evolvePop(temp);
            foreach (MLP m in evolved)
            {
                mlps.Add(m);
            }
        }

        iteration++;
    }
}