private void ComputeGradient(Gradients gradients, List<double> inputs, List<double> requiredOutputs)
{
    // Forward pass: propagate the inputs so that every neuron's Output is up to date.
    Activities(inputs);

    // Backward pass: walk the layers from the output layer down to the first hidden layer.
    for (int i = NumberOfLayers() - 1; i >= 1; i--)
    {
        NeuralLayer currentLayer = GetLayer(i);
        if (currentLayer.IsLayerTop())
        {
            // Output layer: delta = output * (1 - output) * (output - required output).
            for (int j = 0; j < currentLayer.NumberOfNeurons(); j++)
            {
                Neuron currentNeuron = currentLayer.GetNeuron(j);
                gradients.SetThreshold(i, j,
                    currentNeuron.Output * (1 - currentNeuron.Output) * (currentNeuron.Output - requiredOutputs[j]));
            }
            // Weight gradient = this neuron's delta * output of the connected neuron in the layer below.
            for (int j = 0; j < currentLayer.NumberOfNeurons(); j++)
            {
                Neuron currentNeuron = currentLayer.GetNeuron(j);
                for (int k = 0; k < currentNeuron.NumberOfInputs(); k++)
                {
                    gradients.SetWeight(i, j, k,
                        gradients.GetThreshold(i, j) * currentLayer.LowerLayer().GetNeuron(k).Output);
                }
            }
        }
        else
        {
            // Hidden layer: delta = output * (1 - output) * sum over the upper layer of (upper delta * connecting weight).
            for (int j = 0; j < currentLayer.NumberOfNeurons(); j++)
            {
                double aux = 0;
                for (int ia = 0; ia < currentLayer.UpperLayer().NumberOfNeurons(); ia++)
                {
                    aux += gradients.GetThreshold(i + 1, ia) * currentLayer.UpperLayer().GetNeuron(ia).GetInput(j).Weight;
                }
                gradients.SetThreshold(i, j,
                    currentLayer.GetNeuron(j).Output * (1 - currentLayer.GetNeuron(j).Output) * aux);
            }
            // Weight gradients are computed exactly as for the output layer.
            for (int j = 0; j < currentLayer.NumberOfNeurons(); j++)
            {
                Neuron currentNeuron = currentLayer.GetNeuron(j);
                for (int k = 0; k < currentNeuron.NumberOfInputs(); k++)
                {
                    gradients.SetWeight(i, j, k,
                        gradients.GetThreshold(i, j) * currentLayer.LowerLayer().GetNeuron(k).Output);
                }
            }
        }
    }
}
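For reference, the quantities stored by SetThreshold and SetWeight above correspond to the standard backpropagation rules for sigmoid units (the notation below is introduced here, not taken from the original listing): for an output neuron $j$, $\delta_j = o_j(1 - o_j)(o_j - d_j)$, and for a hidden neuron $j$, $\delta_j = o_j(1 - o_j)\sum_k \delta_k w_{kj}$, where $o_j$ is the neuron's output, $d_j$ the required output, $\delta_k$ the deltas of the layer above, and $w_{kj}$ the weight connecting neuron $j$ to neuron $k$ in that layer. The gradient of the weight on input $k$ of neuron $j$ is then $\delta_j \, o_k$, with $o_k$ taken from the layer below.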
private void Adaptation(SetOfIOPairs trainingSet, int maxK, double eps, double lambda, double micro)
{
    // lambda is the learning rate, micro the momentum coefficient,
    // maxK the maximum number of iterations and eps the target gradient magnitude.
    double delta;
    Gradients deltaGradients = new Gradients(this);   // previous changes, used for the momentum term
    Gradients totalGradients = new Gradients(this);   // gradient accumulated over the whole training set
    Gradients partialGradients = new Gradients(this); // gradient of a single training pair

    Console.WriteLine("setting up random weights and thresholds ...");

    // Initialise all thresholds and weights with random values from the interval (-1, 1).
    for (int i = NumberOfLayers() - 1; i >= 1; i--)
    {
        NeuralLayer currentLayer = GetLayer(i);
        for (int j = 0; j < currentLayer.NumberOfNeurons(); j++)
        {
            Neuron currentNeuron = currentLayer.GetNeuron(j);
            currentNeuron.Threshold = 2 * Random() - 1;
            for (int k = 0; k < currentNeuron.NumberOfInputs(); k++)
            {
                currentNeuron.GetInput(k).Weight = 2 * Random() - 1;
            }
        }
    }

    int currK = 0;
    double currE = double.PositiveInfinity;
    Console.WriteLine("entering adaptation loop ... (maxK = " + maxK + ")");

    // Iterate until the iteration limit is reached or the gradient magnitude drops below eps.
    while (currK < maxK && currE > eps)
    {
        ComputeTotalGradient(totalGradients, partialGradients, trainingSet);
        for (int i = NumberOfLayers() - 1; i >= 1; i--)
        {
            NeuralLayer currentLayer = GetLayer(i);

            // Update thresholds: gradient-descent step plus a momentum term.
            for (int j = 0; j < currentLayer.NumberOfNeurons(); j++)
            {
                Neuron currentNeuron = currentLayer.GetNeuron(j);
                delta = -lambda * totalGradients.GetThreshold(i, j) + micro * deltaGradients.GetThreshold(i, j);
                currentNeuron.Threshold += delta;
                deltaGradients.SetThreshold(i, j, delta);
            }

            // Update the weights in the same way.
            for (int k = 0; k < currentLayer.NumberOfNeurons(); k++)
            {
                Neuron currentNeuron = currentLayer.GetNeuron(k);
                for (int l = 0; l < currentNeuron.NumberOfInputs(); l++)
                {
                    delta = -lambda * totalGradients.GetWeight(i, k, l) + micro * deltaGradients.GetWeight(i, k, l);
                    currentNeuron.GetInput(l).Weight += delta;
                    deltaGradients.SetWeight(i, k, l, delta);
                }
            }
        }
        currE = totalGradients.GetGradientAbs();
        currK++;
        if (currK % 25 == 0)
        {
            Console.WriteLine("currK=" + currK + " currE=" + currE);
        }
    }
}
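A minimal sketch of how this adaptation loop might be driven from a public training entry point. Only Adaptation and SetOfIOPairs come from the listings above; the Learn wrapper and the concrete hyperparameter values are assumptions for illustration, not part of the original code.

// Hypothetical wrapper that would live in the same class as Adaptation.
// The name Learn and the example values are illustrations only.
public void Learn(SetOfIOPairs trainingSet)
{
    // maxK   : maximum number of adaptation iterations
    // eps    : stop once the gradient magnitude falls below this value
    // lambda : learning rate
    // micro  : momentum coefficient
    Adaptation(trainingSet, maxK: 8000, eps: 0.000001, lambda: 0.1, micro: 0.5);
}

In practice, lambda and micro would be tuned for the data at hand; a larger lambda speeds up convergence but risks oscillation, while the momentum term micro helps the update carry through flat regions of the error surface.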