/// <summary>
/// <para>
/// Perform one pass of training. Adjust the weights based on the current state of the
/// <see cref="Output"/> layer and the desired values.
/// Use <see cref="FeedForward"/> to calculate the output values.
/// </para>
///
/// <para>
/// Calculate the errors/losses of each layer (using <see cref="CalculateLoss(Vector)"/>)
/// and then adjust the weights accordingly (using
/// <see cref="NNOperations.CalculateDeltas(Layer, Layer, Vector, ActivationFunction, float)"/>).
/// </para>
/// </summary>
/// <param name="desiredOutput">the desired output value of the network</param>
/// <returns>the results</returns>
public LayerResult[] AdjustWeights(Vector desiredOutput)
{
    LayerResult[] results = CalculateLoss(desiredOutput);

    // Walk from the output layer back towards the input layer; each step
    // adjusts the weights and biases of the lower layer (index upper - 1).
    for (int upper = results.Length - 1; upper >= 1; upper--)
    {
        LayerResult upperResult = results[upper];
        LayerResult lowerResult = results[upper - 1];

        // Compute the weight/bias adjustments for the lower layer from the
        // upper layer's loss, scaled by the learning rate.
        Deltas deltas = NNOperations.CalculateDeltas(
            lowerResult.Layer,
            upperResult.Layer,
            upperResult.Loss,
            Properties.DerivativeActivation,
            Properties.LearningRate);

        // Build the adjusted weights/biases, then copy the values into the
        // existing layer objects in place.
        Matrix adjustedWeights = lowerResult.Layer.Weights + deltas.Weights;
        Vector adjustedBiases = lowerResult.Layer.Biases + deltas.Biases;
        lowerResult.Layer.Weights.Values = adjustedWeights.Values;
        lowerResult.Layer.Biases.Values = adjustedBiases.Values;

        // Record the applied deltas on the lower layer's result.
        results[upper - 1].Deltas = deltas;
    }

    return results;
}
/// <summary>
/// Compute the loss of every layer by first taking the loss of the
/// <see cref="Output"/> layer against the desired values and then propagating
/// it backwards through the network, one layer at a time.
/// </summary>
/// <param name="desired">the desired output values of the network</param>
/// <returns>one <see cref="LayerResult"/> per layer, pairing each layer with its loss</returns>
public LayerResult[] CalculateLoss(Vector desired)
{
    Layer[] layers = AllLayers;

    // Loss of the topmost (output) layer; carried downwards through the loop.
    Vector upperLoss = NNOperations.OutputLoss(Output, desired, Properties.LossFunction);

    LayerResult[] results = new LayerResult[LayerCount];
    for (int upper = layers.Length - 1; upper >= 1; upper--)
    {
        Layer upperLayer = layers[upper];
        Layer lowerLayer = layers[upper - 1];

        // Propagate the loss one layer down the network.
        Vector lowerLoss = NNOperations.PropagateLoss(upperLoss, lowerLayer);

        results[upper] = new LayerResult(upperLayer, upperLoss);
        results[upper - 1] = new LayerResult(lowerLayer, lowerLoss);

        // The lower layer's loss becomes the upper loss of the next iteration.
        upperLoss = lowerLoss;
    }

    return results;
}
/// <summary>
/// Calculate the results by feeding the input layer forward through the network.
/// The resulting values can end up in the <see cref="Output"/> layer.
/// </summary>
public void FeedForward()
{
    // Activate each hidden layer in order, starting from the input layer.
    Layer previous = Input;
    foreach (Layer hidden in Hidden)
    {
        hidden.Nodes = NNOperations.CalculateValues(previous, Properties.ActivationFunction);
        previous = hidden;
    }

    // Finally activate the output layer from the last hidden layer
    // (or directly from the input layer when there are no hidden layers).
    Output.Nodes = NNOperations.CalculateValues(previous, Properties.ActivationFunction);
}
/// <summary>
/// Initialization: create the weight matrices between consecutive layers and
/// initialize every layer with its index and its outgoing weights.
/// </summary>
public void Initialize()
{
    // One weight matrix per pair of consecutive layers:
    // input -> hidden[0] -> ... -> hidden[n-1] -> output.
    Weights = new Matrix[Hidden.Length + 1];

    Layer previous = Input;
    for (int index = 0; index < Hidden.Length; index++)
    {
        Weights[index] = NNOperations.CreateWeights(previous, Hidden[index]);
        previous.Initialize(index, Weights[index]);
        previous = Hidden[index];
    }

    // Connect the last hidden layer (or the input layer when there are no
    // hidden layers) to the output layer. The output layer has no outgoing
    // weights, so it is initialized with null.
    Weights[Hidden.Length] = NNOperations.CreateWeights(previous, Output);
    previous.Initialize(Hidden.Length, Weights[Hidden.Length]);
    Output.Initialize(Hidden.Length + 1, null);
}