/// <summary>
/// Computes this neuron's activation from the previous layer: the weighted sum of
/// every previous neuron's output (using that neuron's connection to this neuron,
/// looked up by <c>myIndex</c>) passed through the transfer function.
/// </summary>
/// <param name="prevLayer">The layer feeding into this neuron.</param>
public void feedForward(Layer prevLayer)
{
    input = 0.0;

    // Sum output_prev * weight(prev -> this) over all neurons in the previous
    // layer (including its bias neuron, which is iterated like any other).
    // Use the Count property rather than LINQ Count() — no enumerator overhead
    // on each loop test (CA1829).
    for (int n = 0; n < prevLayer.neurons.Count; n++)
    {
        input += prevLayer.neurons[n].Output * prevLayer.neurons[n].getConnection(myIndex).Weight;
    }

    output = MathUtils.TransferFunction(input);
}
/// <summary>
/// Builds the network layers from a topology: one <see cref="Layer"/> per entry,
/// where <c>topology[i]</c> is the neuron count of layer i. Each layer is told how
/// many outputs it feeds (the size of the next layer; 0 for the output layer).
/// </summary>
/// <param name="topology">Neuron counts per layer, input layer first. Must contain at least one entry.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="topology"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="topology"/> is empty.</exception>
public Net(List<int> topology)
{
    // Guard early so a bad topology fails with a clear message instead of an
    // opaque ArgumentOutOfRangeException at topology[0] below.
    if (topology == null)
        throw new ArgumentNullException(nameof(topology));
    if (topology.Count == 0)
        throw new ArgumentException("Topology must contain at least one layer.", nameof(topology));

    int numLayers = topology.Count;
    // Single Random shared by all layers so consecutive layers don't get
    // identical seeds from the system clock.
    Random rand = new Random();

    for (int layerNum = 0; layerNum < numLayers; layerNum++)
    {
        // The last layer feeds nothing; every other layer feeds the next one.
        int numOutputs = (layerNum == numLayers - 1) ? 0 : topology[layerNum + 1];

        Layer layer = new Layer(topology[layerNum], numOutputs, rand);
        m_layers.Add(layer);
    }

    nrInputs = topology[0];
    nrOutputs = topology[numLayers - 1];
}
private double sumDOW(Layer nexLayer) { double sum = 0.0; //Sum our contributions of the errors at the nodes we feed for (int n = 0; n < nexLayer.neurons.Count() - 1; n++) { sum += m_outputWeights[n].Weight * nexLayer.neurons[n].m_gradient; } return sum; }
/// <summary>
/// Backpropagation step for a hidden neuron: the gradient is the summed,
/// weighted downstream error (<see cref="sumDOW"/>) scaled by the derivative
/// of the transfer function at this neuron's net input.
/// </summary>
/// <param name="nextLayer">The layer this neuron feeds into.</param>
public void calcHiddenGradient(Layer nextLayer)
{
    m_gradient = sumDOW(nextLayer) * MathUtils.TransferFunctionDerivative(input);
}
/// <summary>
/// Updates the weights of every connection feeding this neuron, using the
/// standard backprop delta rule with momentum:
/// deltaW = eta * output_prev * gradient + alpha * previousDeltaW.
/// </summary>
/// <param name="prevLayer">The layer whose connections into this neuron are updated.</param>
public void updateInputWeights(Layer prevLayer)
{
    for (int n = 0; n < prevLayer.neurons.Count; n++)
    {
        Neuron neuron = prevLayer.neurons[n];

        // Fetch the connection prev -> this once. The original code wrote
        // through m_outputWeights[myIndex] directly, so Connection is a
        // reference type and a single reference is safe to read and mutate.
        Connection connection = neuron.m_outputWeights[myIndex];
        double oldDeltaWeight = connection.DeltaWeight;

        // eta  = overall learning rate
        // alpha = momentum -> adds a fraction of the previous delta weight
        //
        // BUG FIX: the derivative of this neuron's net input with respect to
        // this weight is the previous neuron's *activated output* (the forward
        // pass sums prev.Output * Weight), so the delta must use neuron.Output,
        // not neuron.Input.
        double newDeltaWeight =
            eta * neuron.Output * m_gradient
            + alpha * oldDeltaWeight;

        connection.DeltaWeight = newDeltaWeight;
        connection.Weight += newDeltaWeight;
    }
}