Example #1
        private void applyStep(GradStep step, DerivedLayer layer)
        {
            for (int i = 0; i < layer.weights.RowCount; i++)                       //iterate first by row because every row represents a neuron
            {
                for (int j = 0; j < layer.weights.ColumnCount; j++)                //each column holds the jth incoming weight for that neuron
                {
                    layer.weights[i, j] -= learningFactor * step.weightStep[i, j]; //change each weight by its negative gradient
                }

                layer.biases[i] -= learningFactor * step.biasStep[i]; //change the bias by its negative gradient, one for each neuron in the layer
            }
        }
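
For context, applyStep performs one plain gradient-descent step on a single layer: each parameter moves opposite its gradient, scaled by learningFactor. A minimal sketch of how it might be driven across the whole network follows, using the hiddenLayers and output fields from Example #2 below; the backprop helper that returns one GradStep per derived layer is hypothetical, not part of the original code:

        private void trainOn(double[] sample, double[] target)
        {
            GradStep[] steps = backprop(sample, target); //hypothetical helper: one GradStep per derived layer, output's step last
            for (int i = 0; i < hiddenLayers.Length; i++)
            {
                applyStep(steps[i], hiddenLayers[i]); //descend each hidden layer's gradient
            }
            applyStep(steps[steps.Length - 1], output); //then the output layer
        }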
Example #2
 /// <summary>
 /// Creates a fully connected network from an input size, a list of hidden layer sizes, and an output size
 /// </summary>
 /// <param name="inputSize">number of neurons in the input layer</param>
 /// <param name="hiddenSizes">size of each hidden layer, in order; may be empty</param>
 /// <param name="outputSize">number of neurons in the output layer</param>
 public Network(int inputSize, int[] hiddenSizes, int outputSize)
 {
     input = new Layer(new double[inputSize]); //input layer is your data
     if (hiddenSizes.Length > 0)
     {
         hiddenLayers    = new DerivedLayer[hiddenSizes.Length];    //where the stuff happens
         hiddenLayers[0] = new DerivedLayer(input, hiddenSizes[0]); //first hidden layer pulls from input
         for (int i = 1; i < hiddenSizes.Length; i++)
         {
             hiddenLayers[i] = new DerivedLayer(hiddenLayers[i - 1], hiddenSizes[i]); //then the next hidden layers pull from the hidden layer before them
         }
         output = new DerivedLayer(hiddenLayers[hiddenLayers.Length - 1], outputSize); //the output is derived from the last hidden layer
     }
     }
     else
     {
         output = new DerivedLayer(input, outputSize); //directly from input to output (probably sucks)
     }
 }
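
As a usage sketch, with illustrative layer sizes (the numbers are assumptions, e.g. flattened 28x28 images classified into 10 categories, not from the original):

 Network net    = new Network(784, new int[] { 16, 16 }, 10); //784 inputs, two hidden layers of 16 neurons, 10 outputs
 Network direct = new Network(784, new int[0], 10);           //empty hiddenSizes: falls through to the direct input-to-output case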