public void Initialize(CostFuntionKind costFunction, params int[] layersNodes)
{
    if (initialized)
    {
        throw new System.InvalidOperationException("Initialize may only be called once.");
    }
    if (layersNodes.Length < 2)
    {
        throw new System.ArgumentException("At least an input layer and an output layer are required.", nameof(layersNodes));
    }
    inputSize = layersNodes[0];
    outputSize = layersNodes[layersNodes.Length - 1];
    neuralNetAccessor = new NeuralNetAccessor(layersNodes);
    // Cost function used by back propagation.
    this.costFunction = costFunction;
    InitializeWeightsAndBiases();
    initialized = true;
}
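// Hypothetical usage sketch (not part of the original source; the NeuralNet
// type name and the CostFuntionKind.Quadratic member are assumptions based on
// this file): a network with 4 inputs, one hidden layer of 8 nodes, 2 outputs.
// var net = new NeuralNet();
// net.Initialize(CostFuntionKind.Quadratic, 4, 8, 2);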
void InitializeLearning()
{
    // Allocate accumulators shaped like the network itself: one for the
    // cost-function gradient, one for the back propagation output per node.
    costFunctionGradient = new NeuralNetAccessor(neuralNetAccessor.InitializedParameters);
    backPropagationOutput = new NeuralNetAccessor(neuralNetAccessor.InitializedParameters);
    InitializeBackPropagation();
    initializedLearning = true;
}
void AddRegularizationL1(NeuralNetAccessor costFunctionGradient, float regularizationRate)
{
    // L1 regularization: the penalty |w| contributes sign(w) * rate to the
    // gradient of each weight (biases are left unregularized).
    for (int layer = 0; layer < costFunctionGradient.NumberOfLayers; layer++)
    {
        for (int node = 0; node < costFunctionGradient.NodesInLayer(layer); node++)
        {
            var costFunctionGradientWeights = costFunctionGradient.GetSigmoid(layer, node).Weights;
            var neuralNetWeights = neuralNetAccessor.GetSigmoid(layer, node).Weights;
            for (int i = 0; i < costFunctionGradientWeights.Length; i++)
            {
                costFunctionGradientWeights[i] += Mathf.Sign(neuralNetWeights[i]) * regularizationRate;
            }
        }
    }
}
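// AddRegularizationL2 is dispatched below but not shown in this excerpt. This
// is a sketch of what it presumably looks like, assuming the standard L2
// penalty whose gradient contribution per weight is regularizationRate * w,
// mirroring the L1 method above (hypothetical, not the original implementation):
void AddRegularizationL2(NeuralNetAccessor costFunctionGradient, float regularizationRate)
{
    for (int layer = 0; layer < costFunctionGradient.NumberOfLayers; layer++)
    {
        for (int node = 0; node < costFunctionGradient.NodesInLayer(layer); node++)
        {
            var costFunctionGradientWeights = costFunctionGradient.GetSigmoid(layer, node).Weights;
            var neuralNetWeights = neuralNetAccessor.GetSigmoid(layer, node).Weights;
            for (int i = 0; i < costFunctionGradientWeights.Length; i++)
            {
                costFunctionGradientWeights[i] += neuralNetWeights[i] * regularizationRate;
            }
        }
    }
}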
void AddRegularization(RegularizationMethodKind regularizationMethod, NeuralNetAccessor costFunctionGradient, float regularizationRate)
{
    switch (regularizationMethod)
    {
        case RegularizationMethodKind.None:
            break;
        case RegularizationMethodKind.L2:
            AddRegularizationL2(costFunctionGradient, regularizationRate);
            break;
        case RegularizationMethodKind.L1:
            AddRegularizationL1(costFunctionGradient, regularizationRate);
            break;
        default:
            throw new System.NotImplementedException();
    }
}
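// Hypothetical call site (a sketch, not from the original source): after the
// gradient for a mini-batch has been accumulated, the penalty is folded in
// before the weights are updated. The rate value here is illustrative only.
// AddRegularization(RegularizationMethodKind.L1, costFunctionGradient, 0.0001f);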
public BackPropagationQuadratic(NeuralNetAccessor neuralNetAccessor)
{
    this.neuralNetAccessor = neuralNetAccessor;
    Initialize();
}

public BackPropagationCrossEntropy(NeuralNetAccessor neuralNetAccessor)
{
    this.neuralNetAccessor = neuralNetAccessor;
    Initialize();
}
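// Background sketch on why two back propagation strategies exist (the formulas
// are the standard ones for sigmoid neurons; the method names below are
// hypothetical, not part of the original source): the two costs differ only in
// the output-layer error term. For the quadratic cost, delta = (a - y) * σ'(z);
// for cross-entropy the σ'(z) factor cancels, giving delta = a - y, which
// avoids the learning slowdown when σ'(z) is near zero.
float QuadraticOutputDelta(float activation, float target, float weightedInput)
{
    float s = Sigmoid(weightedInput);
    return (activation - target) * s * (1f - s); // (a - y) * σ'(z)
}

float CrossEntropyOutputDelta(float activation, float target)
{
    return activation - target; // (a - y); the σ'(z) factor cancels out
}

float Sigmoid(float z)
{
    return 1f / (1f + Mathf.Exp(-z));
}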