Example #1
        public void AddBias()
        {
            // Create a bias neuron with the same number of dendrites as the
            // last neuron in the layer and the next sequential ID.
            Neuron lastNeuron = Neurons.Last();
            Neuron newNeuron  = new Neuron(
                lastNeuron.Dendrites.Count(),
                lastNeuron.ID + 1);

            // A bias neuron always outputs the constant value 1.
            newNeuron.Value = 1;
            newNeuron.Bias  = true;
            Neurons.Add(newNeuron);
        }
Example #2
        /// <summary>
        /// Trains the perceptron in supervised mode using error backpropagation with deltas.
        /// </summary>
        /// <param name="input">The input vector</param>
        /// <param name="ideal">The expected (correct) output vector</param>
        /// <returns>The actual output vector and the per-output error</returns>
        public (Vector output, Vector error) Learn(Vector input, Vector ideal)
        {
            CheckIdealVector(ideal, Neurons.Last());
            var actual = Run(input);
            var error  = new Vector(actual.Length);

            for (var i = 0; i < ideal.Length; i++)
            {
                // Pass the ideal values through the activation function so they are
                // comparable to the activated outputs, then compute the per-output error.
                ideal[i] = Activation.Func(ideal[i]);
                error[i] = GetError(actual[i], ideal[i]);
            }
            LearnWithBackPropagationError(actual, ideal);
            SetNextEpoch();
            return (actual, error);
        }
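A minimal usage sketch for the method above, assuming a hypothetical Perceptron class that exposes Learn and a Vector type constructible from a double[]; only the Learn signature comes from the example itself:

    // Hypothetical training loop; Perceptron, its constructor arguments, and the
    // Vector(double[]) constructor are assumptions for illustration.
    var network = new Perceptron(2, 3, 1);   // assumed layer sizes
    var input   = new Vector(new double[] { 0.1, 0.9 });
    var ideal   = new Vector(new double[] { 1.0 });

    for (int epoch = 0; epoch < 1000; epoch++)
    {
        var (output, error) = network.Learn(input, ideal);
        // output is the activated result of the forward pass; error holds the
        // per-output deviation for this sample in the current epoch.
    }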
Example #3
 /// <summary>
 /// Calculates the output vector of the neural network (forward pass)
 /// </summary>
 public Vector Run(Vector input)
 {
     InitializeNeuronsWithInput(input);

     // Propagate the signal layer by layer: each neuron of the next layer
     // accumulates the weighted sum of the current layer's outputs and is
     // then passed through the activation function.
     for (var i = 0; i < Neurons.Length - 1; i++)
     {
         for (int j = 0; j < Neurons[i + 1].Length; j++)
         {
             for (int k = 0; k < Neurons[i].Length; k++)
             {
                 Neurons[i + 1][j] += Weights[i][j][k] * Neurons[i][k];
             }
             Neurons[i + 1][j] = Activation.Func(Neurons[i + 1][j]);
         }
     }
     return Neurons.Last();
 }
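The same layer-by-layer propagation written as a standalone sketch against plain arrays; this is not part of the library above, and a sigmoid stands in for Activation.Func (requires using System; for Math.Exp):

    // Standalone forward-pass sketch: layers[i] holds the values of layer i,
    // weights[i][j][k] connects neuron k of layer i to neuron j of layer i + 1.
    static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    static double[] Forward(double[][] layers, double[][][] weights)
    {
        for (int i = 0; i < layers.Length - 1; i++)
        {
            for (int j = 0; j < layers[i + 1].Length; j++)
            {
                double sum = 0;
                for (int k = 0; k < layers[i].Length; k++)
                {
                    sum += weights[i][j][k] * layers[i][k];
                }
                // Weighted sum first, then the nonlinearity.
                layers[i + 1][j] = Sigmoid(sum);
            }
        }
        return layers[layers.Length - 1];
    }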
Example #4
        private void AddWeights(Layer prevLayer, int index)
        {
            List<Neuron> lastNeurons = new List<Neuron>();
            List<double> weights     = new List<double>();
            List<double> inputs      = new List<double>();

            // Fully connect the new neuron to every neuron of the previous
            // layer's feature map, initializing each weight to 0.5.
            for (int i = 0; i < NumOfPrevious; i++)
            {
                for (int j = 0; j < NumOfPrevious; j++)
                {
                    Neuron prevNeuron = prevLayer.Neurons.Find(x => x.MapX == i && x.MapY == j);
                    weights.Add(0.5);
                    inputs.Add(prevNeuron.Output);
                    lastNeurons.Add(prevNeuron);
                }
            }
            Neurons.Add(new Neuron(inputs, weights, _NeuronType, 0, index));
            Neurons.Last().LastNeurons = lastNeurons;
        }
Example #5
        protected void OverlayMatrix(int shiftI, int shiftJ, Layer prevLayer, int step)
        {
            int wi = 0, wj = 0;
            List<Neuron> lastNeurons = new List<Neuron>();
            List<double> weights     = new List<double>();
            List<double> inputs      = new List<double>();

            // Overlay the weight kernel on the previous layer's feature map at
            // offset (shiftI, shiftJ) and collect the covered neurons as inputs.
            for (int i = shiftI; i < Weights.Length + shiftI; i++)
            {
                for (int j = shiftJ; j < Weights.Length + shiftJ; j++)
                {
                    Neuron prevNeuron = prevLayer.Neurons.Find(x => x.MapX == i && x.MapY == j);
                    weights.Add(Weights[wi][wj]);
                    inputs.Add(prevNeuron.Output);
                    lastNeurons.Add(prevNeuron);
                    wj++;
                }
                wj = 0;
                wi++;
            }
            // The new neuron's position on the output map is the offset divided by the stride.
            Neurons.Add(new Neuron(inputs, weights, _NeuronType, shiftI / step, shiftJ / step));
            Neurons.Last().LastNeurons = lastNeurons;
        }
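A hedged sketch of how a caller might slide this kernel across the previous layer's feature map in convolution style; mapSize and the loop bounds are assumptions, only OverlayMatrix itself comes from the example above:

    // Hypothetical caller: place the kernel at every offset that fits on an
    // assumed mapSize x mapSize feature map, stepping by the stride `step`.
    int kernel = Weights.Length;
    for (int shiftI = 0; shiftI + kernel <= mapSize; shiftI += step)
    {
        for (int shiftJ = 0; shiftJ + kernel <= mapSize; shiftJ += step)
        {
            OverlayMatrix(shiftI, shiftJ, prevLayer, step);
        }
    }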
Example #6
 public T[] Calc()
 {
     // Compute the output of every neuron in the last layer.
     return Neurons.Last().Select(n => n.Calc()).ToArray();
 }