Code Example #1
File: Network.cs Project: jeadean/HuNN
        // Backpropagate the error: don't update any weights yet; first compute all of the propagated errors, then update the weights
        public void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateErrorAndGradient(targets[i++])); // Compute the output layer's error
            TotalError = OutputLayer.Sum(a => Math.Abs(a.Error));
            HiddenLayers.Reverse();                                              // Reverse the hidden layers to work backwards from the last layer

            //HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateErrorAndGradient()));
            foreach (List <Neuron> HiddenLayer in HiddenLayers)
            {
                Parallel.ForEach(HiddenLayer, a =>
                {
                    a.CalculateErrorAndGradient();
                });
            }
            // Update the connection weights
            OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
            //HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearnRate, Momentum)));
            foreach (List <Neuron> HiddenLayer in HiddenLayers)
            {
                Parallel.ForEach(HiddenLayer, a =>
                {
                    a.UpdateWeights(LearnRate, Momentum);
                });
            }

            HiddenLayers.Reverse(); // Reverse the hidden layers back to their original order
        }
Code Example #2
        internal async Task InitTrain()
        {
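            // Reset the demo, re-subscribe the TrainingEnded handler exactly once, parse the
            // comma-separated hidden layer sizes, rebuild the network, and show the first
            // training sample in a freshly started scene.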
            await Reset();

            CurrentDemo.TrainingEnded -= TrainingEnded;
            CurrentDemo.TrainingEnded += TrainingEnded;

            if (string.IsNullOrWhiteSpace(HiddenLayers) == false)
            {
                CurrentDemo.HiddenLayers = HiddenLayers.Split(',').Select(l => int.Parse(l)).ToArray();
            }
            else
            {
                CurrentDemo.HiddenLayers = new int[] { };
            }
            CurrentDemo.Speed        = SpeedValue;
            CurrentDemo.LearningRate = LearningRate;
            CurrentDemo.TargetError  = TargetError;
            CurrentDemo.CreateNeuralNetwork();
            TotalSteps = CurrentDemo.TotalSteps;
            TotalError = CurrentDemo.TotalError;

            Input        = string.Join(",", CurrentDemo.TrainingSet[0]);
            Output       = string.Join(",", CurrentDemo.NeuralNetwork.Update(CurrentDemo.TrainingSet[0]));
            TargetOutput = string.Join(",", CurrentDemo.Targets[0]);

            var scene = new NeuralNetworkScene(CurrentDemo.NeuralNetwork);
            await _game.Start(scene);
        }
Code Example #3
File: Network.cs Project: NNordhaus/Neural_Network
        private void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            //HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a =>
                Parallel.ForEach(a, b =>
                {
                    b.CalculateGradient();
                }));
            //HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearnRate, Momentum)));
            HiddenLayers.ForEach(a =>
                Parallel.ForEach(a, b =>
                {
                    b.UpdateWeights(LearnRate, Momentum);
                }));
            HiddenLayers.Reverse();
            //OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
            Parallel.ForEach(OutputLayer, (on) =>
            {
                on.UpdateWeights(LearnRate, Momentum);
            });
        }
Code Example #4
        protected void CreateNeurons(int inputSize, int[] hiddenSizes, int outputSize)
        {
            for (var i = 0; i < inputSize; i++)
            {
                InputLayer.Add(new Neuron());
            }

            var firstHiddenLayer = new List <Neuron>();

            for (var i = 0; i < hiddenSizes[0]; i++)
            {
                firstHiddenLayer.Add(new Neuron(InputLayer, HiddenActivationType));
            }

            HiddenLayers.Add(firstHiddenLayer);

            for (var i = 1; i < hiddenSizes.Length; i++)
            {
                var hiddenLayer = new List <Neuron>();
                for (var j = 0; j < hiddenSizes[i]; j++)
                {
                    hiddenLayer.Add(new Neuron(HiddenLayers[i - 1], HiddenActivationType));
                }
                HiddenLayers.Add(hiddenLayer);
            }

            for (var i = 0; i < outputSize; i++)
            {
                OutputLayer.Add(new Neuron(HiddenLayers.Last(), OutputActivationType));
            }
        }
Code Example #5
        public void AddNeuron()
        {
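            // Add-node mutation: pick a random existing synapse, remove it, and splice a new
            // hidden neuron into its place via two new synapses (weight 1 into the neuron,
            // the old weight out of it).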
            if (!AllSynapses.Any())
            {
                AddSynapse();
                return;
            }
            //Debug.Log("Poczatek add neuron");
            int     tmp    = RandomGenerator.Next(AllSynapses.Count);
            Synapse oldSyn = AllSynapses.ToList()[tmp].Value;

            AllSynapses.Remove(oldSyn.InnovationNo);
            oldSyn.InputNeuron.OutputSynapses.Remove(oldSyn);
            oldSyn.OutputNeuron.InputSynapses.Remove(oldSyn);
            Neuron neuron = new Neuron(NeuronInnovationNo);


            Synapse newSyn1 = new Synapse(oldSyn.InputNeuron, neuron, SynapseInnovationNo);

            newSyn1.Weight = 1;
            Synapse newSyn2 = new Synapse(neuron, oldSyn.OutputNeuron, SynapseInnovationNo);

            newSyn2.Weight = oldSyn.Weight;

            HiddenLayers.Add(neuron.InnovationNo, neuron);
            AllSynapses.Add(newSyn1.InnovationNo, newSyn1);
            AllSynapses.Add(newSyn2.InnovationNo, newSyn2);
        }
Code Example #6
        public bool Train(List <double> input, List <double> idealOutput)
        {
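            // One training step: validate the sample dimensions, apply dropout, run the forward
            // pass, compute output and hidden-layer deltas, then update all non-input layers.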
            if ((input.Count != Layers.First().Size) || (idealOutput.Count != Layers.Last().Size))
            {
                return(false);
            }

            Dropout();

            Run(input);

            OutputLayer.InitDelta(idealOutput);
            foreach (var layer in HiddenLayers.Reverse())
            {
                layer.CalcDelta();
            }

            foreach (var layer in Layers.Skip(1))
            {
                layer.UpdateWeights();
            }

            ClearDropout();
            return(true);
        }
Code Example #7
    public object Clone()
    {
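        // Deep-copy every weight matrix element by element and copy the bias list, then
        // rebuild the clone's hidden layers with the same neuron count per layer.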
        var clone        = Of(InputLayer.ColsCount, OutputLayer.ColsCount, ActivationsFunctions);
        var cloneWeights = new List <Matrix>();

        foreach (var weight in Weights)
        {
            var currentWeight = new Matrix(weight.RowsCount, weight.ColsCount);

            for (var rowI = 0; rowI < currentWeight.RowsCount; rowI++)
            {
                for (var colJ = 0; colJ < currentWeight.ColsCount; colJ++)
                {
                    currentWeight[rowI, colJ] = weight[rowI, colJ];
                }
            }

            cloneWeights.Add(currentWeight);
        }

        var cloneBiases = new List <double>(Biases);

        clone.Weights = cloneWeights;
        clone.Biases  = cloneBiases;

        var neuronsInHiddenLayers = HiddenLayers.Select(layer => layer.ColsCount).ToArray();

        clone.InitializeHiddenLayers(neuronsInHiddenLayers);

        return(clone);
    }
Code Example #8
        private void BackPropagate(params double[] targets)
        {
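            // Besides the usual gradient/weight-update pass, this variant tracks classification
            // accuracy by comparing the argmax of the targets with the argmax of the outputs,
            // and logs the running average error and accuracy every 1000 calls.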
            NumCalc++;

            int actual = targets.ToList().IndexOf(targets.Max());

            double[] outputs   = OutputLayer.Select(a => a.Value).ToArray();
            int      predicted = outputs.ToList().IndexOf(outputs.Max());

            if (actual == predicted)
            {
                Accuracy += 1;
            }

            int i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.UpdateWeights(LearnRate, Momentum)));
            HiddenLayers.Reverse();
            OutputLayer.AsParallel().ForAll(a => a.UpdateWeights(LearnRate, Momentum));

            i = 0;
            double error = OutputLayer.Sum(a => Math.Abs(a.CalculateError(targets[i++])));

            Error += error;

            if (NumCalc % 1000 == 0)
            {
                Console.WriteLine($"Error: {Error / 1000 / 10} NumCalc: {NumCalc} Accuracy: {Accuracy / 10.0}");
                Error    = 0;
                Accuracy = 0;
            }
        }
Code Example #9
        private void BindKochonenNet()
        {
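            // Fully connect input->hidden and hidden->hidden, bind the last hidden layer to the
            // output layer one-to-one, then initialise the first hidden layer's incoming weights
            // around 0.5 (scaled by the input size) and its outgoing weights to 1.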
            AlltoAllBinding(InputLayer, HiddenLayers[0]);

            if (HiddenLayers.Count > 1)
            {
                for (int i = 0; i < HiddenLayers.Count - 1; i++)
                {
                    AlltoAllBinding(HiddenLayers[i], HiddenLayers[i + 1]);
                }
            }


            OnetoOneBinding(HiddenLayers.Last(), OutputLayer);

            double koordQ = InputLayer.neurons.Count;
            Random a      = new Random();

            foreach (var s in HiddenLayers[0].neurons)
            {
                foreach (var inl in s.inLinks)
                {
                    inl.w = 0.5 + (a.NextDouble() - 0.5) / koordQ;
                }
                foreach (var outl in s.outLinks)
                {
                    outl.w = 1d;
                }
            }
        }
Code Example #10
        private void ForwardPropagate(params double[] inputs)
        {
            int i = 0;

            InputLayer.ForEach(a => a.Value = inputs[i++]);
            HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.CalculateValue()));
            OutputLayer.AsParallel().ForAll(a => a.CalculateValue());
        }
Code Example #11
    private IEnumerator UpdateWeights()
    {
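        // Unity coroutine: overwrite each input synapse's Weight with its WeightDelta for the
        // output and hidden layers, then wait one second before logging completion.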
        OutputLayer.ForEach(n => n.InputSynapses.ForEach(s => s.Weight = s.WeightDelta));
        HiddenLayers.ForEach(l => l.ForEach(n => n.InputSynapses.ForEach(s => s.Weight = s.WeightDelta)));
        yield return(new WaitForSeconds(1));

        Debug.Log("End BP");
    }
Code Example #12
 public NeuralNetwork(int inputNeurons, int hiddenNeurons, int outputNeurons)
 {
     InputLayer = new InputLayer(inputNeurons);
     HiddenLayers.Add(new HiddenLayer(hiddenNeurons));
     OutputLayer = new OutputLayer(outputNeurons);
     Generate();
     CreateSubscribes();
 }
Code Example #13
File: Network.cs Project: PrzChodor/ProjektSSI
        // Compute the neural network's output value for a single input element
        private void ForwardPropagate(double[] inputs)
        {
            var i = 0;

            InputLayer.ForEach(a => a.Value = inputs[i++]);
            HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateValue()));
            OutputLayer.ForEach(a => a.CalculateValue());
        }
Code Example #14
        public HiddenLayer CreateHiddenLayer()
        {
            HiddenLayer hiddenLayer;

            hiddenLayer = new HiddenLayer();
            HiddenLayers.Add(hiddenLayer);

            return(hiddenLayer);
        }
Code Example #15
 public NeuralNet Crossover(NeuralNet partner)
 {
     OutputLayer.Crossover(partner.OutputLayer);
     foreach (var layer in HiddenLayers.Zip(partner.HiddenLayers))
     {
         layer.Key.Crossover(layer.Value);
     }
     return(this);
 }
Code Example #16
 public NeuralNetworkGene GetGenes()
 {
     return(new NeuralNetworkGene
     {
         InputGene = InputLayer.GetGenes(),
         HiddenGenes = HiddenLayers.Select(l => l.GetGenes()).ToList(),
         OutputGene = OutputLayer.GetGenes()
     });
 }
Code Example #17
        private void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.AsEnumerable().Reverse().ToList().ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.AsEnumerable().Reverse().ToList().ForEach(a => a.ForEach(b => b.UpdateWeights(LearnRate, Momentum)));
            OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
        }
Code Example #18
        public void BuildFromDNA(string dna)
        {
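            // The DNA string is "||"-separated: inputCount || outputCount || depth ||
            // hiddenNeuronsPerLayer, optionally followed by one weight per dendrite
            // (hidden layers first, then the output layer).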
            string[] parts = dna.Split(new[] { "||" }, StringSplitOptions.None);

            int inputCount            = int.Parse(parts[0]);
            int outputCount           = int.Parse(parts[1]);
            int depth                 = 0;
            int hiddenNeuronsPerLayer = 0;

            if (parts.Length > 2)
            {
                depth = int.Parse(parts[2]);
            }
            if (parts.Length > 3)
            {
                hiddenNeuronsPerLayer = int.Parse(parts[3]);
            }

            InputLayer = new NeuronsLayer(0, inputCount);

            InputLayer.BuildDendrites(1, 1);

            HiddenLayers = Enumerable.Range(1, depth).Select(i => new NeuronsLayer(i, hiddenNeuronsPerLayer)).ToList();

            OutputLayer = new NeuronsLayer(1 + depth, outputCount);

            HiddenLayers.ForEach(h => h.BuildDendrites(h == HiddenLayers.First() ? inputCount : hiddenNeuronsPerLayer, 0));

            OutputLayer.BuildDendrites(hiddenNeuronsPerLayer, 0);

            if (parts.Length > 4)
            {
                int weightCounter = 4;

                foreach (NeuronsLayer nl in HiddenLayers)
                {
                    foreach (Neuron n in nl.Neurons)
                    {
                        foreach (Dendrite d in n.Dendrites)
                        {
                            d.Weight = double.Parse(parts[weightCounter++]);
                        }
                    }
                }
                foreach (Neuron n in OutputLayer.Neurons)
                {
                    foreach (Dendrite d in n.Dendrites)
                    {
                        d.Weight = double.Parse(parts[weightCounter++]);
                    }
                }
            }
            AllLayers = new List <NeuronsLayer>();
            AllLayers.Add(InputLayer);
            AllLayers.AddRange(HiddenLayers);
            AllLayers.Add(OutputLayer);
        }
Code Example #19
        public void RandomizeWeights()
        {
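            // Randomise each layer's outgoing weights, sizing them by the next layer
            // (falling back to the output layer when no further hidden layer exists).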
            var random = new Random();

            RandomizeLayer(InputLayer, (HiddenLayers.FirstOrDefault() ?? OutputLayer).Count(), random);
            for (int i = 0; i < HiddenLayers.Count; i++)
            {
                RandomizeLayer(HiddenLayers[i], (HiddenLayers.ElementAtOrDefault(i + 1) ?? OutputLayer).Count, random);
            }
        }
Code Example #20
        private void BackPropagation(params double[] goal)
        {
            var i = 0;

            ExitLayer.Neurons.ForEach(x => x.CalculateGradient(goal[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.CalculateGradient()));
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.UpdateWeight(LearningRate, Momentum)));
            HiddenLayers.Reverse();
            ExitLayer.Neurons.ForEach(x => x.UpdateWeight(LearningRate, Momentum));
        }
Code Example #21
        private void ForwardPropagation(params double[] entryValue)
        {
            var i = 0;

            for (int j = 0; j < EntryLayer.Neurons.Count; j++)
            {
                EntryLayer.Neurons[j].Value = entryValue[i++];
            }
            HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.CalculateEntry()));
            ExitLayer.Neurons.ForEach(x => x.CalculateEntry());
        }
Code Example #22
File: Network.cs Project: PrzChodor/ProjektSSI
        // Compute the gradients and update the weights
        private void BackPropagate(double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            HiddenLayers.Reverse();
            HiddenLayers.ForEach(a => a.ForEach(b => b.CalculateGradient()));
            HiddenLayers.ForEach(a => a.ForEach(b => b.UpdateWeights(LearningRate, Momentum)));
            HiddenLayers.Reverse();
            OutputLayer.ForEach(a => a.UpdateWeights(LearningRate, Momentum));
        }
Code Example #23
    public void InitializeHiddenLayers(int[] hiddenNeuronCount)
    {
        InputLayer.Clear();
        HiddenLayers.Clear();
        OutputLayer.Clear();

        foreach (var neuronsCount in hiddenNeuronCount)
        {
            var newHiddenLayer = new Matrix(1, neuronsCount);
            HiddenLayers.Add(newHiddenLayer);
        }
    }
Code Example #24
        private void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
            foreach (var layer in HiddenLayers.AsEnumerable <List <Neuron> >().Reverse())
            {
                layer.ForEach(a => a.CalculateGradient());
                layer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
            }
            OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));
        }
Code Example #25
        //[JsonRequired]
        //public BrainConfiguration Configuration { get; set; }

        public void InitLayers(BrainConfiguration configuration)
        {
            //Configuration = configuration;

            foreach (IBrainLayer layer in this)
            {
                layer.InitLayer(configuration);
                layer.InitFiredNeurons();
            }

            globals.CachedNeuronCount = HiddenLayers.Sum(l => l.Count);
        }
Code Example #26
        private void BackPropagate(params double[] targets)
        {
            var i = 0;

            OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));                  // Apply CalculateGradient() to every output neuron to move the actual error toward the desired error (MinimumError)
            foreach (var layer in HiddenLayers.AsEnumerable <List <Neuron> >().Reverse()) // Walk the hidden layers in reverse, from the output back toward the input
            {
                layer.ForEach(a => a.CalculateGradient());                                // Apply CalculateGradient() to every neuron in this hidden layer
                layer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));                 // Apply UpdateWeights() to update the weight and bias of every node in this hidden layer
            }
            OutputLayer.ForEach(a => a.UpdateWeights(LearnRate, Momentum));               // Apply UpdateWeights() to update the weight and bias of every output neuron
        }
Code Example #27
 public void DeleteHiddenLayer(LayerViewModel l)
 {
     if (HiddenLayers.Contains(l))
     {
         HiddenLayers.Remove(l);
         for (int i = 0; i < HiddenLayers.Count; i++)
         {
             HiddenLayers[i].Number = i + 1;
         }
     }
     CanCreateChanged?.Invoke();
 }
Code Example #28
        private void SetHiddenLayerNeuronsErrors()
        {
            foreach (var layer in HiddenLayers.Reverse())
            {
                foreach (var neuron in layer.Neurons)
                {
                    var deltaSum = neuron.OutConnections.Sum(connection
                                                             => connection.Destination.Error * connection.Weight.Value);

                    neuron.Error = GetDerivative(neuron) * deltaSum;
                }
            }
        }
Code Example #29
        public void SetSimSettings(SimulationSettings simSettings)
        {
            if (simSettings != null)
            {
                this.simSettings        = simSettings;
                SimType                 = simSettings.SimType;
                Sessions                = simSettings.Sessions;
                Hours                   = simSettings.Hours;
                Score                   = simSettings.Score;
                chkSimDisplay.IsChecked = simSettings.EnableSimDisplay;

                if (!simSettings.EncogSelected)
                {
                    grpEncogSettings.Visibility = Visibility.Collapsed;
                    this.Height -= grpEncogSettings.Height;
                }
                else
                {
                    HiddenLayers         = simSettings.HiddenLayers;
                    txtHiddenLayers.Text = HiddenLayers.ToString();

                    HiddenLayerNeurons         = (simSettings.HiddenLayerNeurons <= 0) ? simSettings.NumSlots * simSettings.NumShelves : simSettings.HiddenLayerNeurons;
                    txtHiddenLayerNeurons.Text = HiddenLayerNeurons.ToString();
                }

                calculateMaxScore();
                simScoreSlider.Value      = simSettings.DefaultScorePercentage;
                simScoreSliderLbl.Content = simScoreSlider.Value + "%";

                switch (SimType)
                {
                case SimulationType.Sessions:
                    rdbSessions.IsChecked = true;
                    txtSimInput.Text      = Sessions.ToString();
                    showScoreSlider(false);
                    break;

                case SimulationType.Time:
                    rdbTime.IsChecked = true;
                    txtSimInput.Text  = Hours.ToString();
                    showScoreSlider(false);
                    break;

                default:
                    rdbScore.IsChecked = true;
                    txtSimInput.Text   = Score.ToString();
                    showScoreSlider(true);
                    break;
                }
            }
        }
Code Example #30
        private void BindClassicNet()
        {
            AlltoAllBinding(InputLayer, HiddenLayers[0]);

            if (HiddenLayers.Count > 1)
            {
                for (int i = 0; i < HiddenLayers.Count - 1; i++)
                {
                    AlltoAllBinding(HiddenLayers[i], HiddenLayers[i + 1]);
                }
            }

            AlltoAllBinding(HiddenLayers.Last(), OutputLayer);
        }