Example 1
        /// <summary>
        /// Computes all neurons
        /// </summary>
        /// <param name="prev">Previous layer neurons</param>
        public void ComputeNodes(Layer prev)
        {
            StreamWriter writer = new StreamWriter("E:/log.txt", true);

            if (loggingEnabled)
            {
                writer.WriteLine("---- Layer Compute ----");
            }

            for (int i = 0; i < nodes.Length; i++)
            {
                this.nodes[i] = 0;
                for (int j = 0; j < prev.nodes.Length; j++)
                {
                    nodes[i] += Sigmoid.Activation(prev.nodes[j]) * weights[j, i];
                }
                if (loggingEnabled)
                {
                    writer.WriteLine($"Node№ {i} : value = {nodes[i]} + bias {biases[i]}");
                }
                nodes[i] += biases[i];
                //nodes[i] = Sigmoid.Activation(nodes[i]); // Not needed. Sigmoid activation applied upon next neuron call
            }
            writer.Flush();
            writer.Close();
        }
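
The layer code above relies on a Sigmoid helper that is not part of this listing. A minimal sketch of what it might look like, assuming Activation is the standard logistic function and ActivationDerivative is its derivative taken at the raw input value (only the two method names come from the examples; the bodies are assumptions):

using System;

/// <summary>
/// Hypothetical sketch of the Sigmoid helper referenced by the examples.
/// Only the method names appear in the original code; the bodies are assumptions.
/// </summary>
public static class Sigmoid
{
    /// <summary>Logistic function 1 / (1 + e^-x).</summary>
    public static double Activation(double x)
    {
        return 1.0 / (1.0 + Math.Exp(-x));
    }

    /// <summary>Derivative of the logistic function, evaluated at the raw input x.</summary>
    public static double ActivationDerivative(double x)
    {
        double s = Activation(x);
        return s * (1.0 - s);
    }
}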
Example 2
 /// <summary>
 /// Computes all neurons in all layers.
 /// </summary>
 /// <param name="input">Input vector</param>
 /// <returns>Vector of probabilities</returns>
 public double[] Compute(double[] input)
 {
     double[] output = new double[outputLayer.number];
     inputLayer.nodes = input;
     hiddenLayers[0].ComputeNodes(inputLayer);
     for (int i = 1; i < hiddenLayers.Count; i++)
     {
         hiddenLayers[i].ComputeNodes(hiddenLayers[i - 1]);
     }
     outputLayer.ComputeNodes(hiddenLayers[hiddenLayers.Count - 1]);
     for (int i = 0; i < outputLayer.number; i++)
     {
         output[i] = Sigmoid.Activation(outputLayer.nodes[i]);
     }
     return output;
 }
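
Compute and ComputeNodes assume a Layer type exposing a handful of public fields. A minimal sketch of that shape, inferred from the field names used across the examples (the constructor and the array dimensions are assumptions):

public class Layer
{
    public int number;                   // Number of neurons in this layer
    public double[] nodes;               // Raw, pre-activation neuron values
    public double[] biases;              // One bias per neuron
    public double[,] weights;            // weights[j, i]: previous-layer neuron j -> this neuron i
    public double[,] weightCorrection;   // Pending weight updates filled in by Propagate
    public double[] biasCorrecton;       // Pending bias updates (spelling kept from the examples)
    public bool loggingEnabled;

    // Hypothetical constructor: the sizes are assumptions, not taken from the original code.
    public Layer(int size, int prevSize)
    {
        number = size;
        nodes = new double[size];
        biases = new double[size];
        weights = new double[prevSize, size];
        weightCorrection = new double[prevSize, size];
        biasCorrecton = new double[size];
    }
}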
Example 3
        /// <summary>
        /// Computes changes by backpropagating errors
        /// </summary>
        /// <param name="errors">Vector of error values</param>
        /// <param name="speed">Speed modifier</param>
        /// <param name="prevLayer">Previous layer neurons</param>
        /// <returns>Errors of previous layer</returns>
        public virtual double[] Propagate(double[] errors, double speed, Layer prevLayer)
        {
            StreamWriter writer = new StreamWriter("E:/log.txt", true);

            if (loggingEnabled)
            {
                writer.WriteLine("---- Layer Error Correction ----");
            }

            double[] prevErrors = new double[prevLayer.number];
            for (int i = 0; i < this.number; i++)
            {
                // Push this neuron's error back to every neuron of the previous layer,
                // weighted by the connection strength.
                for (int j = 0; j < prevLayer.number; j++)
                {
                    prevErrors[j] += errors[i] * weights[j, i];
                }

                // Corrections for the weights and bias feeding this neuron.
                for (int j = 0; j < prevLayer.number; j++)
                {
                    weightCorrection[j, i] = speed * errors[i] * Sigmoid.Activation(prevLayer.nodes[j]);
                    //writer.WriteLine($"Weight correction [{i},{j}] : value = {weightCorrection[j, i]}");
                }
                biasCorrecton[i] = speed * errors[i];
                if (loggingEnabled)
                {
                    writer.WriteLine($"Bias correction {i} : value = {biasCorrecton[i]}");
                }
            }

            // Scale the accumulated errors by the activation derivative of the previous layer's raw values.
            for (int j = 0; j < prevLayer.number; j++)
            {
                if (loggingEnabled)
                {
                    writer.WriteLine($"Error {j} : value = {prevErrors[j]}");
                }
                prevErrors[j] *= Sigmoid.ActivationDerivative(prevLayer.nodes[j]);
                if (loggingEnabled)
                {
                    writer.WriteLine($"Error deactivated {j} : value = {prevErrors[j]}");
                }
            }
            writer.Flush();
            writer.Close();
            return prevErrors;
        }
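
Example 5 below finishes each training pass by calling an Apply method that is not included in this listing. A plausible sketch, assuming Apply simply folds the corrections accumulated by Propagate into the current weights and biases (the method name comes from Example 5; the body is an assumption):

        /// <summary>
        /// Hypothetical sketch: applies the corrections computed by Propagate.
        /// </summary>
        public void Apply()
        {
            for (int i = 0; i < number; i++)
            {
                for (int j = 0; j < weights.GetLength(0); j++)
                {
                    weights[j, i] += weightCorrection[j, i];
                }
                biases[i] += biasCorrecton[i];
            }
        }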
Example 4
        /// <summary>
        /// Computes changes by backpropagating errors.
        /// </summary>
        /// <param name="errors">Vector of error values</param>
        /// <param name="speed">Speed modifier</param>
        /// <param name="prevLayer">Previous layer neurons</param>
        /// <returns>Errors of previous layer</returns>
        public override double[] Propagate(double[] errors, double speed, Layer prevLayer)
        {
            double[] prevErrors = new double[prevLayer.number];
            for (int i = 0; i < this.number; i++)
            {
                // Push this neuron's error back to every neuron of the previous layer.
                for (int j = 0; j < prevLayer.number; j++)
                {
                    prevErrors[j] += errors[i] * weights[j, i];
                }

                // Corrections for the weights and bias feeding this neuron.
                for (int j = 0; j < prevLayer.number; j++)
                {
                    weightCorrection[j, i] = speed * errors[i] * Sigmoid.Activation(prevLayer.nodes[j]);
                }
                biasCorrecton[i] = speed * errors[i];
            }

            // Scale by the previous layer's activation derivative before handing the errors back.
            for (int j = 0; j < prevLayer.number; j++)
            {
                prevErrors[j] *= Sigmoid.ActivationDerivative(prevLayer.nodes[j]);
            }
            return prevErrors;
        }
Example 5
        /// <summary>
        /// Computes all neurons in all layers, then propagates the error.
        /// </summary>
        /// <param name="input">Input vector</param>
        /// <param name="expected">Vector of expected result probabilities</param>
        /// <returns>Vector of probabilities</returns>
        public double[] Learn(double[] input, double[] expected)
        {
            double[] result = this.Compute(input);
            double[] errors = new double[outputLayer.number];
            for (int i = 0; i < result.Length; i++)
            {
                errors[i] = (expected[i] - result[i]) * Sigmoid.ActivationDerivative(outputLayer.nodes[i]);
            }
            // Walk backwards: each layer receives its error vector and the layer that feeds it.
            double[] prevErrors = outputLayer.Propagate(errors, speed, hiddenLayers.Last());
            for (int i = hiddenLayers.Count - 1; i > 0; i--)
            {
                prevErrors = hiddenLayers[i].Propagate(prevErrors, speed, hiddenLayers[i - 1]);
            }
            hiddenLayers[0].Propagate(prevErrors, speed, inputLayer);

            foreach (var item in hiddenLayers)
            {
                item.Apply();
            }
            outputLayer.Apply();
            return result;
        }
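
As a usage illustration, a training loop over Learn and Compute might look like the sketch below. The Network type, its constructor, and the XOR data are assumptions made for the example; only the two method calls come from the snippets above:

// Hypothetical driver: the Network class and its constructor are assumptions.
var network = new Network(inputs: 2, hiddenSizes: new[] { 4 }, outputs: 1);

double[][] samples  = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
double[][] expected = { new[] { 0.0 },      new[] { 1.0 },      new[] { 1.0 },      new[] { 0.0 } };

for (int epoch = 0; epoch < 10000; epoch++)
{
    for (int s = 0; s < samples.Length; s++)
    {
        network.Learn(samples[s], expected[s]);
    }
}

double[] prediction = network.Compute(new[] { 1.0, 0.0 });   // Should drift towards 1.0 as training progresses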