/// <summary>
/// Accumulates the L1 (lasso) penalty's subgradient into <paramref name="costFunctionGradient"/>:
/// for every weight w in the network, adds sign(w) * regularizationRate to the matching
/// gradient entry. Biases are deliberately left unregularized.
/// </summary>
/// <param name="costFunctionGradient">Accessor holding the gradient being accumulated; mutated in place.</param>
/// <param name="regularizationRate">L1 strength (lambda), already scaled by the caller as needed.</param>
void AddRegularizationL1(NeuralNetAccessor costFunctionGradient, float regularizationRate)
{
    for (int layer = 0; layer < costFunctionGradient.NumberOfLayers; layer++)
    {
        for (int node = 0; node < costFunctionGradient.NodesInLayer(layer); node++)
        {
            var gradientWeights = costFunctionGradient.GetSigmoid(layer, node).Weights;
            var networkWeights = neuralNetAccessor.GetSigmoid(layer, node).Weights;
            for (int i = 0; i < gradientWeights.Length; i++)
            {
                // System.Math.Sign returns 0 for an input of exactly 0, which is the
                // conventional L1 subgradient and lets zero weights stay zero (sparsity).
                // Unity's Mathf.Sign returns +1 for 0, which would keep pushing weights
                // that have already been driven to zero.
                gradientWeights[i] += System.Math.Sign(networkWeights[i]) * regularizationRate;
            }
        }
    }
}
/// <summary>
/// Randomizes every sigmoid node in the network: bias drawn from N(0, 1), each incoming
/// weight drawn from N(0, 1/sqrt(fanIn)) where fanIn is the node's weight count
/// (1/sqrt(n) initialization, which keeps pre-activation variance roughly constant).
/// The RNG is reseeded from the system clock, so repeated calls produce different networks.
/// </summary>
public void InitializeWeightsAndBiases()
{
    SimpleRNG.SetSeedFromSystemTime();
    for (int layer = 0; layer < neuralNetAccessor.NumberOfLayers; layer++)
    {
        int nodeCount = neuralNetAccessor.NodesInLayer(layer);
        for (int node = 0; node < nodeCount; node++)
        {
            var sigmoid = neuralNetAccessor.GetSigmoid(layer, node);
            sigmoid.Bias = (float)SimpleRNG.GetNormal(0, 1);

            var weights = sigmoid.Weights;
            // Scale the spread of each weight by the node's fan-in.
            float stdDev = 1.0f / Mathf.Sqrt(weights.Length);
            for (int w = 0; w < weights.Length; w++)
            {
                weights[w] = (float)SimpleRNG.GetNormal(0, stdDev);
            }
        }
    }
}