Example #1
0
        /// <summary>
        /// Compute the loss/error vector of every layer by propagating the output
        /// loss backwards through the network (the backward pass of backpropagation).
        /// The output layer's loss is computed from <see cref="Output"/> and
        /// <paramref name="desired"/>; each earlier layer's loss is then derived
        /// from its successor's loss via <see cref="NNOperations.PropagateLoss"/>.
        /// </summary>
        /// <param name="desired">the desired output values of the network</param>
        /// <returns>
        /// one <see cref="LayerResult"/> per layer, index-aligned with
        /// <see cref="AllLayers"/> (assumes LayerCount equals AllLayers.Length — verify)
        /// </returns>
        public LayerResult[] CalculateLoss(Vector desired)
        {
            Layer[] layers = AllLayers;

            // Loss of the current "downstream" layer; starts as the output-layer loss.
            Vector lossL1 = NNOperations.OutputLoss(Output, desired, Properties.LossFunction);

            LayerResult[] results = new LayerResult[LayerCount];

            // Walk backwards over adjacent layer pairs, stopping before the input
            // layer (i == 0), which has no predecessor to propagate into.
            for (int i = layers.Length - 1; i >= 1; i--)
            {
                Layer l1 = layers[i];
                Layer l0 = layers[i - 1];

                // Propagate the downstream loss one layer back.
                Vector lossL0 = NNOperations.PropagateLoss(lossL1, l0);

                results[i]     = new LayerResult(l1, lossL1);
                results[i - 1] = new LayerResult(l0, lossL0);

                // The upstream loss becomes the downstream loss of the next iteration
                // (results[i - 1] is intentionally overwritten with an equal value then).
                lossL1 = lossL0;
            }

            return results;
        }
Example #2
0
        /// <summary>
        /// <para>
        /// Perform one pass of training. Adjust the weights based on the current state of the <see cref="Output"/> layer and the desired values.
        /// Use <see cref="FeedForward"/> to calculate the output values.
        /// </para>
        ///
        /// <para>
        /// Calculate the errors/losses of each layer (using <see cref="CalculateLoss(Vector)"/>)
        /// and then adjust the weights accordingly (using <see cref="NNOperations.CalculateDeltas(Layer, Layer, Vector, ActivationFunction, float)"/>).
        /// </para>
        /// </summary>
        /// <param name="desiredOutput">the desired output value of the network</param>
        /// <returns>the per-layer results, with <c>Deltas</c> populated for every layer except the output layer</returns>
        public LayerResult[] AdjustWeights(Vector desiredOutput)
        {
            LayerResult[] results = CalculateLoss(desiredOutput);

            // Iterate backwards over adjacent layer pairs; the input layer
            // (i == 0) has no preceding layer, so the loop stops at i == 1.
            for (int i = results.Length - 1; i >= 1; i--)
            {
                LayerResult upper = results[i];
                LayerResult lower = results[i - 1];

                // Compute the weight/bias adjustments for the lower layer from
                // the upper layer's loss, scaled by the learning rate.
                Deltas deltas = NNOperations.CalculateDeltas(lower.Layer, upper.Layer, upper.Loss, Properties.DerivativeActivation, Properties.LearningRate);

                // Build the adjusted weights and biases.
                Matrix newWeights = lower.Layer.Weights + deltas.Weights;
                Vector newBiases  = lower.Layer.Biases + deltas.Biases;

                // Apply in place via Values so existing references to the layer's
                // weight/bias objects observe the update (rather than replacing them).
                lower.Layer.Weights.Values = newWeights.Values;
                lower.Layer.Biases.Values  = newBiases.Values;

                // Record the applied deltas on the stored result (written through
                // the array in case LayerResult is a value type — verify).
                results[i - 1].Deltas = deltas;
            }

            return results;
        }