Example #1
0
 /// <summary>
 /// Initializes the given layer by assigning every neuron a random bias
 /// drawn from the [min, max] range held by this initializer.
 /// </summary>
 /// <param name="CY_NeuronLayer">Layer whose neuron biases are randomized.</param>
 public void init(CY_NeuronLayer CY_NeuronLayer)
 {
     // Seed each neuron with an independently drawn random bias.
     foreach (CY_Neuron n in CY_NeuronLayer.Neurons)
     {
         n.bias = GetRandom(min, max);
     }
 }
Example #2
0
 /// <summary>
 /// Creates a neuron owned by <paramref name="parent"/> with all signal
 /// values (input, output, error, bias) starting at zero.
 /// </summary>
 /// <param name="parent">Layer that owns this neuron.</param>
 public CY_Neuron(CY_NeuronLayer parent)
 {
     // All numeric state begins at zero; training adjusts the bias later.
     this.input = this.output = this.error = this.bias = 0d;
     this.parent = parent;
 }
Example #3
0
        /// <summary>
        /// Nguyen-Widrow style initialization: draws each neuron's bias
        /// uniformly from [-factor, factor], where the factor is computed from
        /// this layer's neuron count and the total number of downstream neurons.
        /// </summary>
        /// <param name="CY_NeuronLayer">Layer whose neuron biases are initialized.</param>
        public void init(CY_NeuronLayer CY_NeuronLayer)
        {
            // Total fan-out: sum of neuron counts over every layer this one feeds.
            int fanOut = 0;
            foreach (CY_NeuronToNeuron connector in CY_NeuronLayer.to)
            {
                fanOut += connector.TargetLayer.neurons.Length;
            }

            // NOTE(review): calcNguyenWidrow's handling of fanOut == 0 (a layer
            // with no outgoing connectors) is not visible here — confirm it is safe.
            double range = calcNguyenWidrow(CY_NeuronLayer.neurons.Length, fanOut);

            // Symmetric random bias around zero, scaled by the Nguyen-Widrow factor.
            foreach (CY_Neuron n in CY_NeuronLayer.Neurons)
            {
                n.bias = GetRandom(-range, range);
            }
        }
        /// <summary>
        /// Runs one supervised training step for a single sample: forward pass,
        /// error computation at the output layer, backward error propagation,
        /// then a learning update on every layer.
        /// </summary>
        /// <param name="e">Sample providing the input and expected output vectors.</param>
        /// <param name="currentIteration">Index of the current training iteration.</param>
        /// <param name="trainingEpochs">Total number of training epochs.</param>
        protected override void LearnSample(CY_NN_Dataset_Entry e, int currentIteration, int trainingEpochs)
        {
            int layerCount = layers.Count;

            // Forward pass: load the sample, then fire layers front to back.
            inputLayer.SetInput(e.inputV);
            for (int i = 0; i < layerCount; i++)
            {
                layers[i].calcFire();
            }

            // Accumulate this sample's error into the running MSE.
            // Direct cast (previously an unchecked 'as' that was dereferenced
            // immediately): a mis-typed output layer now fails with a clear
            // InvalidCastException instead of a NullReferenceException.
            mse += ((CY_NeuronLayer)outputLayer).SetErrors(e.outputV);

            // Backward pass: propagate errors from the last layer to the first.
            // Layers that are not CY_NeuronLayer instances carry no error term.
            for (int i = layerCount - 1; i >= 0; i--)
            {
                if (layers[i] is CY_NeuronLayer layer)
                {
                    layer.calcError();
                }
            }

            // Finally, let every layer adjust its weights/biases for this sample.
            for (int i = 0; i < layerCount; i++)
            {
                layers[i].Learn(currentIteration, trainingEpochs);
            }
        }
 /// <summary>
 /// Builds a supervised back-propagation trainer over the given boundary
 /// layers, starting with a zeroed mean-squared-error accumulator.
 /// </summary>
 /// <param name="inputLayer">First layer of the network.</param>
 /// <param name="outputLayer">Last layer of the network.</param>
 public CY_NN_Backprop(CY_NeuronLayer inputLayer, CY_NeuronLayer outputLayer)
     : base(inputLayer, outputLayer, CY_Train_Method.Supervised)
 {
     mse = 0d; // no samples processed yet
 }