Example #1
        public bool Train(List<double> input, List<double> idealOutput)
        {
            // Reject samples whose sizes do not match the input/output layers.
            if ((input.Count != Layers.First().Size) || (idealOutput.Count != Layers.Last().Size))
            {
                return false;
            }

            Dropout();

            // Forward pass.
            Run(input);

            // Backward pass: seed deltas at the output layer, then propagate
            // them through the hidden layers from back to front.
            OutputLayer.InitDelta(idealOutput);
            foreach (var layer in HiddenLayers.Reverse())
            {
                layer.CalcDelta();
            }

            // Update weights for every layer except the input layer.
            foreach (var layer in Layers.Skip(1))
            {
                layer.UpdateWeights();
            }

            ClearDropout();
            return true;
        }
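Example #1 validates the sample against the input and output layer sizes, runs a forward pass, seeds deltas at the output layer, propagates them back through the hidden layers, and only then updates the weights. A minimal driver loop over it might look like the sketch below; the NeuralNetwork constructor and the sample values are illustrative assumptions, not the API shown above:

        // Hypothetical setup: the constructor signature and the sample data are
        // assumptions for illustration only.
        var network = new NeuralNetwork(inputSize: 2, hiddenSize: 3, outputSize: 1);

        var input       = new List<double> { 0.0, 1.0 };
        var idealOutput = new List<double> { 1.0 };

        for (int epoch = 0; epoch < 1000; epoch++)
        {
            // Train returns false when the sample does not match the topology.
            if (!network.Train(input, idealOutput))
            {
                throw new ArgumentException("Sample does not match the network layer sizes.");
            }
        }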
Example #2
        private void BackPropagate(params double[] targets)
        {
            NumCalc++;

            // Track accuracy: the predicted class is the output neuron with the
            // highest activation; the actual class is the highest target value.
            int actual = targets.ToList().IndexOf(targets.Max());

            double[] outputs   = OutputLayer.Select(a => a.Value).ToArray();
            int      predicted = outputs.ToList().IndexOf(outputs.Max());

            if (actual == predicted)
            {
                Accuracy += 1;
            }

            int i = 0;

            // targets[i++] is safe here because List<T>.ForEach runs sequentially;
            // it would race inside the AsParallel().ForAll calls below. Note that
            // List.Reverse() mutates HiddenLayers in place, so it is reversed back.
            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.UpdateWeights(LearnRate, Momentum)));
            HiddenLayers.Reverse();
            OutputLayer.AsParallel().ForAll(a => a.UpdateWeights(LearnRate, Momentum));

            i = 0;
            double error = OutputLayer.Sum(a => Math.Abs(a.CalculateError(targets[i++])));

            Error += error;

            // Periodically report and reset the running error and accuracy.
            if (NumCalc % 1000 == 0)
            {
                Console.WriteLine($"Error: {Error / 1000 / 10} NumCalc: {NumCalc} Accuracy: {Accuracy / 10.0}");
                Error    = 0;
                Accuracy = 0;
            }
        }
Example #3
        // Backpropagate the error. Do not update the weights yet: first compute
        // the error propagation for every layer, then update the weights.
        public void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateErrorAndGradient(targets[i++])); // compute the output layer's error
            TotalError = OutputLayer.Sum(a => Math.Abs(a.Error));
            HiddenLayers.Reverse();                                              // reverse the hidden layers to walk them back to front

            // Sequential alternative:
            // HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateErrorAndGradient()));
            foreach (List<Neuron> hiddenLayer in HiddenLayers)
            {
                Parallel.ForEach(hiddenLayer, a => a.CalculateErrorAndGradient());
            }

            // Update the connection weights.
            OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));

            // Sequential alternative:
            // HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearnRate, Momentum)));
            foreach (List<Neuron> hiddenLayer in HiddenLayers)
            {
                Parallel.ForEach(hiddenLayer, a => a.UpdateWeights(LearnRate, Momentum));
            }

            HiddenLayers.Reverse(); // restore the original hidden-layer order
        }
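The comment on Example #3 calls out a two-phase design: compute every neuron's gradient before updating any weight. The reason is that a hidden neuron's gradient reads the current outgoing weights, so updating weights mid-pass would corrupt the gradients of the layers still waiting to be processed. A minimal sketch of that dependency, using hypothetical Neuron and Synapse shapes modeled on Example #7 and assuming a sigmoid activation:

        using System.Collections.Generic;
        using System.Linq;

        // Hypothetical shapes for illustration; the names are assumptions,
        // not the source's API.
        public class Synapse
        {
            public Neuron Destination;
            public double Weight;
        }

        public class Neuron
        {
            public double Value;     // activation stored by the forward pass
            public double Gradient;  // delta computed during backpropagation
            public List<Synapse> OutConnections = new List<Synapse>();

            public void CalculateErrorAndGradient()
            {
                // Reads the *current* downstream weights, which is why every
                // gradient must be computed before any weight is updated.
                double deltaSum = OutConnections.Sum(c => c.Destination.Gradient * c.Weight);
                Gradient = Value * (1 - Value) * deltaSum; // sigmoid derivative assumed
            }
        }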
Example #4
        private void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();

            // Sequential alternative:
            // HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => Parallel.ForEach(a, b => b.CalculateGradient()));

            // Sequential alternative:
            // HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearnRate, Momentum)));
            HiddenLayers.ForEach(a => Parallel.ForEach(a, b => b.UpdateWeights(LearnRate, Momentum)));

            HiddenLayers.Reverse();

            // Sequential alternative:
            // OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
            Parallel.ForEach(OutputLayer, on => on.UpdateWeights(LearnRate, Momentum));
        }
Example #5
        // Compute the gradients and update the weights.
        private void BackPropagate(double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearningRate, Momentum)));
            HiddenLayers.Reverse();
            OutputLayer.ForEach(a => a.UpdateWeights(LearningRate, Momentum));
        }
Example #6
        // Same back-to-front scheme as the examples above; ExitLayer plays
        // the role of the output layer.
        private void BackPropagation(params double[] goal)
        {
            var i = 0;

            ExitLayer.Neurons.ForEach(x => x.CalculateGradient(goal[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.CalculateGradient()));
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.UpdateWeight(LearningRate, Momentum)));
            HiddenLayers.Reverse();
            ExitLayer.Neurons.ForEach(x => x.UpdateWeight(LearningRate, Momentum));
        }
Example #7
        private void SetHiddenLayerNeuronsErrors()
        {
            // Walk the hidden layers from back to front: each neuron's error is
            // the activation derivative times the weighted sum of the errors of
            // the neurons it feeds into.
            foreach (var layer in HiddenLayers.Reverse())
            {
                foreach (var neuron in layer.Neurons)
                {
                    var deltaSum = neuron.OutConnections.Sum(
                        connection => connection.Destination.Error * connection.Weight.Value);

                    neuron.Error = GetDerivative(neuron) * deltaSum;
                }
            }
        }
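GetDerivative is not shown in Example #7. For a sigmoid activation the derivative can be computed from the stored output alone, since f'(x) = f(x) * (1 - f(x)). A minimal sketch, assuming (hypothetically) that the neuron exposes its forward-pass output as Output:

        // Hypothetical helper; the sigmoid activation and the Output property
        // are assumptions, since the source does not show GetDerivative.
        private static double GetDerivative(Neuron neuron)
        {
            // Sigmoid: f'(x) = f(x) * (1 - f(x)), recoverable from the output.
            return neuron.Output * (1 - neuron.Output);
        }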