Example #1
0
 /// <summary>
 /// Allocates zero-initialized gradient accumulators — one weight matrix and one
 /// bias vector — whose dimensions mirror each computed layer of the network.
 /// </summary>
 private static (Matrix <double>[] nablaW, Vector <double>[] nablaB) GenParamsZero(DeepNeuralNetwork nn)
 {
     int layerCount = nn.ComputedLayers.Length;
     var nablaW = new Matrix<double>[layerCount];
     var nablaB = new Vector<double>[layerCount];

     int index = 0;
     foreach (ComputedLayer layer in nn.ComputedLayers)
     {
         // DenseMatrix/DenseVector default-construct to all zeros.
         nablaW[index] = new DenseMatrix(layer.Weights.RowCount, layer.Weights.ColumnCount);
         nablaB[index] = new DenseVector(layer.Biasses.Count);
         ++index;
     }

     return (nablaW, nablaB);
 }
Example #2
0
        /// <summary>
        /// Runs one mini-batch gradient-descent step on the network. Training starts
        /// from the weights and biases already present in the network at call time;
        /// the layers are updated in place.
        /// </summary>
        public static void TrainWithBatch(DeepNeuralNetwork nn, IEnumerable <LabeledData> batch, int batchSize, double learningRate)
        {
            int layerCount = nn.ComputedLayers.Length;

            // Sum the back-propagated gradient of every sample in the batch.
            var (nablaW, nablaB) = GenParamsZero(nn);
            foreach (LabeledData sample in batch)
            {
                var delta = BackProp(nn, sample);
                for (int layerIdx = 0; layerIdx < layerCount; ++layerIdx)
                {
                    nablaW[layerIdx] += delta.nablaW[layerIdx];
                    nablaB[layerIdx] += delta.nablaB[layerIdx];
                }
            }

            // Descend: shift each layer's parameters against the accumulated
            // gradient, averaged over the batch and scaled by the learning rate.
            double ratio = learningRate / batchSize;
            for (int layerIdx = 0; layerIdx < layerCount; ++layerIdx)
            {
                ComputedLayer layer = nn.ComputedLayers[layerIdx];
                layer.Weights -= ratio * nablaW[layerIdx];
                layer.Biasses -= ratio * nablaB[layerIdx];
            }
        }