Example #1
        /// <summary>
        /// Computes the output of every neuron in the layer for the given input values.
        /// </summary>
        /// <param name="inputs">Input values fed to each neuron.</param>
        /// <returns>The computed output of each neuron in the layer.</returns>
        public double[] Compute(double[] inputs)
        {
            // local variable to avoid multithread conflicts
            double[] output = new double[NbNeurons];

            // compute each neuron
            for (int i = 0; i < Neurons.Length; i++)
            {
                output[i] = Neurons[i].Compute(inputs);
            }

            // assign the Output property as well (works correctly for single-threaded usage)
            if (Equation is SoftMaxActivation)
            {
                (Equation as SoftMaxActivation).CalculerDiviseur(output);
                // divide each raw output by the computed sum (the softmax divisor)
                for (int i = 0; i < Neurons.Length; i++)
                {
                    output[i] = Neurons[i].SetOutput(output[i] / (Equation as SoftMaxActivation).Diviseur);
                }
                // swap in a fresh activation to reset the divisor for the next pass
                Equation = new SoftMaxActivation();
            }
            this.Output = output;

            return output;
        }
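
Both overloads rely on a SoftMaxActivation that first accumulates a divisor and is then read back when normalizing the outputs. That class is not shown in these examples; the snippet below is only a minimal sketch of what it might look like, assuming each neuron's Compute already returns the exponentiated value so that the divisor is simply the sum of the raw outputs. The member names CalculerDiviseur and Diviseur are taken from the calls above; everything else is an assumption, not the project's actual code.

public class SoftMaxActivation
{
    // Softmax divisor: set by CalculerDiviseur, read back when normalizing the layer's outputs.
    public double Diviseur { get; private set; }

    // Accumulate the divisor from the layer's raw outputs.
    // Assumes the outputs are already exponentiated; otherwise sum Math.Exp(rawOutputs[i]) instead.
    public void CalculerDiviseur(double[] rawOutputs)
    {
        double sum = 0.0;
        for (int i = 0; i < rawOutputs.Length; i++)
        {
            sum += rawOutputs[i];
        }
        Diviseur = sum;
    }
}
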
Example #2
 /// <summary>
 /// Computes the output of every neuron in the layer, using the previous layer's outputs as inputs.
 /// </summary>
 /// <param name="precedent">The previous layer whose Output feeds this layer.</param>
 /// <returns>The computed output of each neuron in the layer.</returns>
 public double[] Compute(Layer precedent)
 {
     // same as the other overload, but takes a layer and uses its Output as the inputs
     double[] output = new double[NbNeurons];
     for (int i = 0; i < Neurons.Length; i++)
     {
         output[i] = Neurons[i].Compute(precedent.Output);
     }
     if (Equation is SoftMaxActivation)
     {
         (Equation as SoftMaxActivation).CalculerDiviseur(output);
         // divide each raw output by the computed sum (the softmax divisor)
         for (int i = 0; i < Neurons.Length; i++)
         {
             output[i] = Neurons[i].SetOutput(output[i] / (Equation as SoftMaxActivation).Diviseur);
         }
         // swap in a fresh activation to reset the divisor for the next pass
         Equation = new SoftMaxActivation();
     }
     this.Output = output;
     return output;
 }
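
For context, here is a short usage sketch showing how the two overloads might be chained for a feed-forward pass. The Layer constructor and the public setter on Equation are assumptions made for illustration; they are not taken from these examples.

// Hypothetical usage; the Layer constructor below is assumed, not the project's real API.
double[] inputs = { 0.25, -0.50, 1.00 };

// First overload: feed raw input values directly into a layer.
Layer hidden = new Layer(4);                    // assumed constructor: number of neurons
hidden.Compute(inputs);

// Second overload: feed the previous layer's stored Output into the next layer.
Layer last = new Layer(2);                      // assumed constructor
last.Equation = new SoftMaxActivation();        // assumed to be publicly settable
double[] probabilities = last.Compute(hidden);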