Example #1
0
        /// <summary>
        /// Configures the network topology and the cost function used for back propagation.
        /// Must be called exactly once before the network is used; a second call throws.
        /// </summary>
        /// <param name="costFunction">Cost function applied during back propagation.</param>
        /// <param name="layersNodes">Node count per layer, input layer first, output layer last. At least two layers are required.</param>
        /// <exception cref="System.InvalidOperationException">Thrown when the network has already been initialized.</exception>
        /// <exception cref="System.ArgumentException">Thrown when fewer than two layers are supplied.</exception>
        public void Initialize(CostFuntionKind costFunction, params int[] layersNodes)
        {
            if (initialized)
            {
                throw new System.InvalidOperationException("Initialize may only be called once.");
            }

            if (layersNodes.Length < 2)
            {
                throw new System.ArgumentException("A network needs at least an input and an output layer.", nameof(layersNodes));
            }

            inputSize  = layersNodes[0];
            outputSize = layersNodes[layersNodes.Length - 1];

            neuralNetAccessor = new NeuralNetAccessor(layersNodes);

            // The chosen cost function drives the back-propagation deltas.
            this.costFunction = costFunction;

            InitializeWeightsAndBiases();

            // Set last so a failed initialization can be retried.
            initialized = true;
        }
Example #2
0
 // Allocates the per-parameter scratch accessors used while learning
 // (one for the accumulated cost-function gradient, one for the raw
 // back-propagation output), wires up back propagation, and flags the
 // instance as ready to learn.
 void InitializeLearning()
 {
     backPropagationOutput = new NeuralNetAccessor(neuralNetAccessor.InitializedParameters);
     costFunctionGradient  = new NeuralNetAccessor(neuralNetAccessor.InitializedParameters);

     InitializeBackPropagation();

     initializedLearning = true;
 }
Example #3
0
 // L1 (lasso-style) regularization: offsets every weight entry of the cost
 // gradient by sign(weight) * rate, which pushes the corresponding network
 // weights toward zero during gradient descent. Biases are left untouched.
 void AddRegularizationL1(NeuralNetAccessor costFunctionGradient, float regularizationRate)
 {
     for (int layerIndex = 0; layerIndex < costFunctionGradient.NumberOfLayers; layerIndex++)
     {
         for (int nodeIndex = 0; nodeIndex < costFunctionGradient.NodesInLayer(layerIndex); nodeIndex++)
         {
             var gradientWeights = costFunctionGradient.GetSigmoid(layerIndex, nodeIndex).Weights;
             var networkWeights  = neuralNetAccessor.GetSigmoid(layerIndex, nodeIndex).Weights;

             for (int w = 0; w < gradientWeights.Length; w++)
             {
                 gradientWeights[w] += regularizationRate * Mathf.Sign(networkWeights[w]);
             }
         }
     }
 }
Example #4
0
        // Dispatches to the regularization implementation selected by the caller.
        // RegularizationMethodKind.None is an explicit no-op; any enum member
        // without an implementation here fails loudly.
        void AddRegularization(RegularizationMethodKind regularizationMethod, NeuralNetAccessor costFunctionGradient, float regularizationRate)
        {
            if (regularizationMethod == RegularizationMethodKind.None)
            {
                return;
            }

            if (regularizationMethod == RegularizationMethodKind.L2)
            {
                AddRegularizationL2(costFunctionGradient, regularizationRate);
            }
            else if (regularizationMethod == RegularizationMethodKind.L1)
            {
                AddRegularizationL1(costFunctionGradient, regularizationRate);
            }
            else
            {
                throw new System.NotImplementedException();
            }
        }
Example #5
0
 /// <summary>
 /// Binds this quadratic-cost back propagation to the network it operates on,
 /// then runs the shared initialization step.
 /// </summary>
 /// <param name="neuralNetAccessor">Accessor for the network's parameters.</param>
 public BackPropagationQuadratic(NeuralNetAccessor neuralNetAccessor)
 {
     this.neuralNetAccessor = neuralNetAccessor;
     Initialize();
 }
Example #6
0
 /// <summary>
 /// Binds this cross-entropy-cost back propagation to the network it operates on,
 /// then runs the shared initialization step.
 /// </summary>
 /// <param name="neuralNetAccessor">Accessor for the network's parameters.</param>
 public BackPropagationCrossEntropy(NeuralNetAccessor neuralNetAccessor)
 {
     this.neuralNetAccessor = neuralNetAccessor;
     Initialize();
 }