Example #1
        void Initialize()
        {
            if (initialized)
            {
                return;
            }

            // Initialize weightedInputs
            weightedInputs = new List<float[]>(neuralNetAccessor.NumberOfLayers);
            for (int i = 0; i < neuralNetAccessor.NumberOfLayers; i++)
            {
                weightedInputs.Add(new float[neuralNetAccessor.NodesInLayer(i)]);
            }

            // Initialize activations
            activations = new List<float[]>(neuralNetAccessor.NumberOfLayers);
            for (int i = 0; i < neuralNetAccessor.NumberOfLayers; i++)
            {
                activations.Add(new float[neuralNetAccessor.NodesInLayer(i)]);
            }

            // Initialize deltas
            deltas = new List<float[]>(neuralNetAccessor.NumberOfLayers);
            for (int i = 0; i < neuralNetAccessor.NumberOfLayers; i++)
            {
                deltas.Add(new float[neuralNetAccessor.NodesInLayer(i)]);
            }

            initialized = true;
        }
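
Example #1 lazily allocates one scratch buffer per layer for the weighted inputs, activations, and deltas used during backpropagation; the initialized guard makes the call idempotent, so it can be invoked defensively before every pass. A minimal standalone sketch of the same pattern (the layer sizes here are made up for illustration):

using System;
using System.Collections.Generic;

class LazyBuffersSketch
{
    static readonly int[] nodesPerLayer = { 4, 8, 2 }; // hypothetical layer sizes
    static List<float[]> activations;
    static bool initialized;

    static void Initialize()
    {
        if (initialized)
        {
            return; // idempotent: repeated calls are no-ops
        }

        activations = new List<float[]>(nodesPerLayer.Length);
        foreach (int n in nodesPerLayer)
        {
            activations.Add(new float[n]); // one zero-filled buffer per layer
        }

        initialized = true;
    }

    static void Main()
    {
        Initialize();
        Initialize(); // safe: the guard skips reallocation
        Console.WriteLine(activations.Count); // prints 3
    }
}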
Example #2
        public void InitializeWeightsAndBiases()
        {
            SimpleRNG.SetSeedFromSystemTime();

            for (int layer = 0; layer < neuralNetAccessor.NumberOfLayers; layer++)
            {
                for (int node = 0; node < neuralNetAccessor.NodesInLayer(layer); node++)
                {
                    var sigmoid = neuralNetAccessor.GetSigmoid(layer, node);

                    // Bias: drawn from a standard normal.
                    sigmoid.Bias = (float)SimpleRNG.GetNormal(0, 1);

                    // Weights: normal with standard deviation 1/sqrt(fan-in), which keeps
                    // the node's weighted input O(1) so the sigmoid does not start saturated.
                    float standardDeviation = 1.0f / Mathf.Sqrt(sigmoid.Weights.Length);
                    for (int i = 0; i < sigmoid.Weights.Length; i++)
                    {
                        sigmoid.Weights[i] = (float)SimpleRNG.GetNormal(0, standardDeviation);
                    }
                }
            }
        }
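
Example #2 seeds the generator, then draws each bias from N(0, 1) and each weight from N(0, 1/sqrt(n)), where n is the node's fan-in. Because the n weight contributions each have variance 1/n, the weighted input z = w·x + b stays of order 1 no matter how wide the previous layer is. A dependency-free sketch of that effect; the Box-Muller transform stands in for SimpleRNG.GetNormal here (an assumption about its internals), and the inputs are fixed at 1.0 for simplicity:

using System;

class InitVarianceSketch
{
    static readonly Random rng = new Random(42);

    // Box-Muller transform: one sample from N(0, 1).
    static double Normal()
    {
        double u1 = 1.0 - rng.NextDouble(); // in (0, 1], so Log is safe
        double u2 = rng.NextDouble();
        return Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
    }

    static void Main()
    {
        const int fanIn = 1000, trials = 10000;
        double stdDev = 1.0 / Math.Sqrt(fanIn); // the same 1/sqrt(n) as above

        double sumOfSquares = 0;
        for (int t = 0; t < trials; t++)
        {
            double z = 0; // weighted input of one node, all inputs fixed at 1.0
            for (int i = 0; i < fanIn; i++)
            {
                z += stdDev * Normal();
            }
            sumOfSquares += z * z;
        }

        // Empirical Var(z) stays near 1.0 despite the 1000 inputs.
        Console.WriteLine($"Var(z) ~ {sumOfSquares / trials:F3}");
    }
}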
Example #3
        void AddRegularizationL1(NeuralNetAccessor costFunctionGradient, float regularizationRate)
        {
            for (int layer = 0; layer < costFunctionGradient.NumberOfLayers; layer++)
            {
                for (int node = 0; node < costFunctionGradient.NodesInLayer(layer); node++)
                {
                    var costFunctionGradientWeights = costFunctionGradient.GetSigmoid(layer, node).Weights;
                    var neuralNetWeights            = neuralNetAccessor.GetSigmoid(layer, node).Weights;
                    for (int i = 0; i < costFunctionGradientWeights.Length; i++)
                    {
                        // d/dw of the L1 penalty rate * |w| is rate * sign(w).
                        costFunctionGradientWeights[i] += Mathf.Sign(neuralNetWeights[i]) * regularizationRate;
                    }
                }
            }
        }
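
Example #3 folds the L1 (lasso) penalty into an already-computed cost gradient: since the derivative of rate * |w| with respect to w is rate * sign(w), each gradient entry is shifted by the regularization rate times the sign of the corresponding network weight. A minimal sketch of that update on plain arrays (the identifiers are illustrative, not from the original; note that Unity's Mathf.Sign used above treats 0 as positive, while Math.Sign below returns 0 at exactly 0):

using System;

class L1GradientSketch
{
    // In-place L1 term: gradient[i] += rate * sign(weights[i]).
    static void AddL1(float[] gradient, float[] weights, float rate)
    {
        for (int i = 0; i < gradient.Length; i++)
        {
            gradient[i] += Math.Sign(weights[i]) * rate;
        }
    }

    static void Main()
    {
        var gradient = new float[] { 0.5f, -0.2f, 0.0f };
        var weights  = new float[] { -3.0f, 4.0f, 0.0f };
        AddL1(gradient, weights, 0.1f);
        Console.WriteLine(string.Join(", ", gradient)); // ~0.4, ~-0.1, 0
    }
}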
Example #4
        void ComputeWeightGradients(int layer, float[] learningExampleInput, INeuralNetAccessor outputGradient)
        {
            int numberOfNodes   = neuralNetAccessor.NodesInLayer(layer);
            int numberOfWeights = neuralNetAccessor.WeightsOfNodeInLayer(layer);

            for (int node = 0; node < numberOfNodes; node++)
            {
                for (int weight = 0; weight < numberOfWeights; weight++)
                {
                    // The activation feeding this weight: layer 0 reads the raw
                    // training example, deeper layers read the previous layer's output.
                    float a;
                    if (layer == 0)
                    {
                        a = learningExampleInput[weight];
                    }
                    else
                    {
                        a = activations[layer - 1][weight];
                    }

                    // BiasAccessor[layer, node] already holds this node's delta
                    // (dC/db), so the weight gradient is dC/dw = activation * delta.
                    outputGradient.WeightAccessor[layer, node, weight] = a * outputGradient.BiasAccessor[layer, node];
                }
            }
        }
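
Example #4 derives the weight part of the gradient from the bias part: backpropagation gives dC/dw_jk = a_k * delta_j, where a_k is the activation feeding the weight and delta_j is the node's error, which equals dC/db_j and is therefore already stored in BiasAccessor. Per layer this is just an outer product of the incoming activations and the node deltas; a minimal sketch on plain arrays (identifiers here are illustrative):

using System;

class WeightGradientSketch
{
    // grad[node, weight] = inputs[weight] * deltas[node] (an outer product)
    static float[,] OuterProduct(float[] inputs, float[] deltas)
    {
        var grad = new float[deltas.Length, inputs.Length];
        for (int node = 0; node < deltas.Length; node++)
        {
            for (int weight = 0; weight < inputs.Length; weight++)
            {
                grad[node, weight] = inputs[weight] * deltas[node];
            }
        }
        return grad;
    }

    static void Main()
    {
        var inputs = new float[] { 1.0f, 0.5f };  // activations feeding the layer
        var deltas = new float[] { 0.2f, -0.4f }; // per-node errors (dC/db)
        var grad = OuterProduct(inputs, deltas);
        Console.WriteLine(grad[1, 0]); // -0.4 = inputs[0] * deltas[1]
    }
}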