Code example #1
        private void BackPropagate(params double[] targets)
        {
            NumCalc++;

            int actual = targets.ToList().IndexOf(targets.Max());

            double[] outputs   = OutputLayer.Select(a => a.Value).ToArray();
            int      predicted = outputs.ToList().IndexOf(outputs.Max());

            if (actual == predicted)
            {
                Accuracy += 1;
            }

            int i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.UpdateWeights(LearnRate, Momentum)));
            HiddenLayers.Reverse();
            OutputLayer.AsParallel().ForAll(a => a.UpdateWeights(LearnRate, Momentum));

            i = 0;
            double error = OutputLayer.Sum(a => Math.Abs(a.CalculateError(targets[i++])));

            Error += error;

            if (NumCalc % 1000 == 0)
            {
                Console.WriteLine($"Error: {Error / 1000 / 10} NumCalc: {NumCalc} Accuracy: {Accuracy / 10.0}");
                Error    = 0;
                Accuracy = 0;
            }
        }
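This variant accumulates Error and Accuracy across calls and reports them every 1000 samples, so it expects to be invoked once per training example. A minimal sketch of such a driver, assuming a Train method, a tuple-based data set, and a matching ForwardPropagate (none of which appear in the example above):

    // Hypothetical training loop: forward pass, then backpropagation, once per sample.
    public void Train(List<(double[] Inputs, double[] Targets)> dataSet, int epochs)
    {
        for (int epoch = 0; epoch < epochs; epoch++)
        {
            foreach (var (inputs, targets) in dataSet)
            {
                ForwardPropagate(inputs);  // compute every neuron's Value
                BackPropagate(targets);    // compute gradients and update weights
            }
        }
    }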
Code example #2
File: Network.cs Project: NNordhaus/Neural_Network
        private void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            //HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a =>
                                 Parallel.ForEach(a, (b) =>
            {
                b.CalculateGradient();
            })
                                 );
            //HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearnRate, Momentum)));
            HiddenLayers.ForEach(a =>
                                 Parallel.ForEach(a, (b) =>
            {
                b.UpdateWeights(LearnRate, Momentum);
            })
                                 );
            HiddenLayers.Reverse();
            //OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
            Parallel.ForEach(OutputLayer, (on) =>
            {
                on.UpdateWeights(LearnRate, Momentum);
            });
        }
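This version keeps the original sequential calls as comments next to their Parallel.ForEach replacements: neurons within a layer are processed concurrently, while the layers themselves are still visited one at a time in reverse order, which preserves the dependency of each hidden layer's gradients on the gradients of the layer after it.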
Code example #3
File: Network.cs Project: PrzChodor/ProjektSSI
        // Calculate the network's output value for a single input element
        private void ForwardPropagate(double[] inputs)
        {
            var i = 0;

            InputLayer.ForEach(a => a.Value = inputs[i++]);
            HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateValue()));
            OutputLayer.ForEach(a => a.CalculateValue());
        }
Code example #4
        private void ForwardPropagate(params double[] inputs)
        {
            int i = 0;

            InputLayer.ForEach(a => a.Value = inputs[i++]);
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.CalculateValue()));
            OutputLayer.AsParallel().ForAll(a => a.CalculateValue());
        }
Code example #5
    private IEnumerator UpdateWeights()
    {
        OutputLayer.ForEach(n => n.InputSynapses.ForEach(s => s.Weight = s.WeightDelta));
        HiddenLayers.ForEach(l => l.ForEach(n => n.InputSynapses.ForEach(s => s.Weight = s.WeightDelta)));
        yield return new WaitForSeconds(1);

        Debug.Log("End BP");
    }
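This variant is a Unity coroutine: it waits one second via WaitForSeconds and logs through Debug.Log, so it has to be scheduled by the engine rather than called directly. A minimal usage sketch, assuming the method lives in a MonoBehaviour:

    // Start the coroutine; Unity resumes it after the one-second wait and then logs "End BP".
    StartCoroutine(UpdateWeights());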
Code example #6
        public void BuildFromDNA(string dna)
        {
            string[] parts = dna.Split(new[] { "||" }, StringSplitOptions.None);

            int inputCount            = int.Parse(parts[0]);
            int outputCount           = int.Parse(parts[1]);
            int depth                 = 0;
            int hiddenNeuronsPerLayer = 0;

            if (parts.Length > 2)
            {
                depth = int.Parse(parts[2]);
            }
            if (parts.Length > 3)
            {
                hiddenNeuronsPerLayer = int.Parse(parts[3]);
            }

            InputLayer = new NeuronsLayer(0, inputCount);

            InputLayer.BuildDendrites(1, 1);

            HiddenLayers = Enumerable.Range(1, depth).Select(i => new NeuronsLayer(i, hiddenNeuronsPerLayer)).ToList();

            OutputLayer = new NeuronsLayer(1 + depth, outputCount);

            HiddenLayers.ForEach(h => h.BuildDendrites(h == HiddenLayers.First() ? inputCount : hiddenNeuronsPerLayer, 0));

            OutputLayer.BuildDendrites(hiddenNeuronsPerLayer, 0);

            if (parts.Length > 4)
            {
                int weightCounter = 4;

                foreach (NeuronsLayer nl in HiddenLayers)
                {
                    foreach (Neuron n in nl.Neurons)
                    {
                        foreach (Dendrite d in n.Dendrites)
                        {
                            d.Weight = double.Parse(parts[weightCounter++]);
                        }
                    }
                }
                foreach (Neuron n in OutputLayer.Neurons)
                {
                    foreach (Dendrite d in n.Dendrites)
                    {
                        d.Weight = double.Parse(parts[weightCounter++]);
                    }
                }
            }
            AllLayers = new List<NeuronsLayer>();
            AllLayers.Add(InputLayer);
            AllLayers.AddRange(HiddenLayers);
            AllLayers.Add(OutputLayer);
        }
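The DNA string is a flat list of fields separated by "||": input count, output count, hidden-layer depth, neurons per hidden layer, and then one weight per dendrite (all hidden layers first, then the output layer). A hypothetical example, assuming BuildDendrites(n, ...) gives every neuron in a layer n incoming dendrites; the weight values are illustrative only:

    // 2 inputs, 1 output, 1 hidden layer of 2 neurons,
    // then 2 x 2 = 4 hidden-layer weights and 1 x 2 = 2 output-layer weights.
    network.BuildFromDNA("2||1||1||2||0.5||-0.3||0.1||0.7||0.2||-0.8");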
Code example #7
File: Network.cs Project: PrzChodor/ProjektSSI
        // Calculate the gradients and update the weights
        private void BackPropagate(double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearningRate, Momentum)));
            HiddenLayers.Reverse();
            OutputLayer.ForEach(a => a.UpdateWeights(LearningRate, Momentum));
        }
Code example #8
        private void BackPropagation(params double[] goal)
        {
            var i = 0;

            ExitLayer.Neurons.ForEach(x => x.CalculateGradient(goal[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.CalculateGradient()));
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.UpdateWeight(LearningRate, Momentum)));
            HiddenLayers.Reverse();
            ExitLayer.Neurons.ForEach(x => x.UpdateWeight(LearningRate, Momentum));
        }
Code example #9
        private void ForwardPropagation(params double[] entryValue)
        {
            for (int i = 0; i < EntryLayer.Neurons.Count; i++)
            {
                EntryLayer.Neurons[i].Value = entryValue[i];
            }
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.CalculateEntry()));
            ExitLayer.Neurons.ForEach(x => x.CalculateEntry());
        }
Code example #10
File: NeuralNet.cs Project: rhoninn11/Basket_nn
    private void BackPropagate(params double[] targets)
    {
        var i = 0;

        OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
        foreach (var layer in HiddenLayers.AsEnumerable<List<Neuron>>().Reverse())
        {
            layer.ForEach(a => a.CalculateGradient());
        }
        HiddenLayers.ForEach(hl => hl.ForEach(n => n.UpdateWeights(LearnRate, Momentum)));
        OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
    }
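Unlike the variants that call HiddenLayers.Reverse() before and after the gradient pass, this one enumerates a reversed view via AsEnumerable<List<Neuron>>().Reverse(), so the underlying list order is never mutated; the weight updates can then run over the layers in their original order, since they only consume gradients that have already been computed.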
Code example #11
File: Network.cs Project: NNordhaus/Neural_Network
 private void ForwardPropagate(bool dropOut, params double[] inputs)
 {
     //InputLayer.ForEach(a => a.Value = inputs[a.Index]);
     Parallel.ForEach(InputLayer, (a) =>
     {
         a.Value = inputs[a.Index];
     });
     //HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateValue(dropOut)));
     HiddenLayers.ForEach(a =>
                          Parallel.ForEach(a, (b) =>
     {
         b.CalculateValue(dropOut);
     })
                          );
     OutputLayer.ForEach(a => a.CalculateValue(false));
 }
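Here the dropOut flag is forwarded only to the hidden layers, while the output layer always calls CalculateValue(false), so dropout (when enabled for a training pass) never disables output neurons.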
Code example #12
 private void UpdateHiddenLayerOutput()
 {
     HiddenLayers.ForEach(h => h.UpdateNeuronsOutput());
 }
Code example #13
 private void InitHiddenLayersWithRandomValues()
 {
     HiddenLayers.ForEach(h => InitHiddenLayerWithRandomValues(h));
 }
Code example #14
 public double[] PredictionFor(double[] inputs, ParallelOptions parallelOptions)
 {
     InputLayer.SetInputLayerOutputs(inputs);
     HiddenLayers.ForEach(layer => layer.Neurons.ParallelForEach(a => a.CalculateOutput(), parallelOptions));
     return OutputLayer.Neurons.Select(a => a.CalculateOutput()).ToArray();
 }
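Because the caller supplies the ParallelOptions, the degree of parallelism used for the hidden-layer pass can be capped from outside. A minimal usage sketch, where the network variable and the input values are assumptions:

    // Limit the hidden-layer computation to the number of available cores.
    var options = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
    double[] outputs = network.PredictionFor(new[] { 0.2, 0.8, 0.5 }, options);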
Code example #15
 public virtual void ResetNetwork()
 {
     OutputLayer.ForEach(neuron => neuron.ResetNeuron());
     HiddenLayers.ForEach(layer => layer.ForEach(neuron => neuron.ResetNeuron()));
 }