// Performs one backpropagation step for a single training example and
// accumulates running statistics (NumCalc, Accuracy, Error are instance
// fields mutated across calls), printing a progress line every 1000 calls.
private void BackPropagate(params double[] targets) {
    NumCalc++;
    // Argmax of targets = expected class; argmax of outputs = predicted class.
    int actual = targets.ToList().IndexOf(targets.Max());
    double[] outputs = OutputLayer.Select(a => a.Value).ToArray();
    int predicted = outputs.ToList().IndexOf(outputs.Max());
    if (actual == predicted) { Accuracy += 1; }
    // Output-layer gradients pair each neuron with its target; the lambda
    // advances `i`, relying on List.ForEach iterating in order (it does).
    int i = 0;
    OutputLayer.ForEach(a => a.CalculateGradient(targets[i++]));
    // Hidden layers are processed back-to-front for gradient flow: the list
    // is reversed in place and restored afterwards. Neurons within a layer
    // run in parallel; layers themselves run sequentially.
    HiddenLayers.Reverse();
    HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.CalculateGradient()));
    HiddenLayers.ForEach(a => a.AsParallel().ForAll(b => b.UpdateWeights(LearnRate, Momentum)));
    HiddenLayers.Reverse();
    OutputLayer.AsParallel().ForAll(a => a.UpdateWeights(LearnRate, Momentum));
    // Re-walk the targets to accumulate the absolute per-neuron error.
    i = 0;
    double error = OutputLayer.Sum(a => Math.Abs(a.CalculateError(targets[i++])));
    Error += error;
    // NOTE(review): the divisors 1000 and 10 look dataset-specific
    // (1000-sample reporting window, presumably 10 output classes) —
    // confirm before reusing this method in another context.
    if (NumCalc % 1000 == 0) {
        Console.WriteLine($"Error: {Error / 1000 / 10} NumCalc: {NumCalc} Accuracy: {Accuracy / 10.0}");
        Error = 0;
        Accuracy = 0;
    }
}
/// <summary>
/// Backpropagates gradients for one training example: output-layer gradients
/// first, then hidden layers from last to first, followed by weight updates
/// for hidden and output layers. Per-layer neuron work runs in parallel.
/// </summary>
private void BackPropagate(params double[] targets) {
    int index = 0;
    OutputLayer.ForEach(neuron => neuron.CalculateGradient(targets[index++]));

    // Walk hidden layers back-to-front by reversing in place (restored below).
    HiddenLayers.Reverse();
    foreach (var layer in HiddenLayers) {
        Parallel.ForEach(layer, neuron => neuron.CalculateGradient());
    }
    foreach (var layer in HiddenLayers) {
        Parallel.ForEach(layer, neuron => neuron.UpdateWeights(LearnRate, Momentum));
    }
    HiddenLayers.Reverse();

    Parallel.ForEach(OutputLayer, neuron => neuron.UpdateWeights(LearnRate, Momentum));
}
// Computes the network's output value for a single input sample:
// copies the inputs into the input layer, then evaluates hidden and
// output neurons in order.
private void ForwardPropagate(double[] inputs) {
    int index = 0;
    InputLayer.ForEach(neuron => neuron.Value = inputs[index++]);
    foreach (var layer in HiddenLayers) {
        layer.ForEach(neuron => neuron.CalculateValue());
    }
    foreach (var neuron in OutputLayer) {
        neuron.CalculateValue();
    }
}
// Feeds one input vector through the network. Input values are assigned
// sequentially; hidden and output neurons are evaluated in parallel via
// PLINQ, one layer at a time.
private void ForwardPropagate(params double[] inputs) {
    var index = 0;
    InputLayer.ForEach(neuron => neuron.Value = inputs[index++]);
    foreach (var layer in HiddenLayers) {
        layer.AsParallel().ForAll(neuron => neuron.CalculateValue());
    }
    OutputLayer.AsParallel().ForAll(neuron => neuron.CalculateValue());
}
// Unity coroutine: applies the pending weight delta to every input synapse
// in the output and hidden layers, then waits one second and logs completion.
// NOTE(review): `s.Weight = s.WeightDelta` OVERWRITES each weight with its
// delta rather than accumulating (`+=`) — looks suspicious for a weight
// update step; confirm this is intentional.
private IEnumerator UpdateWeights() {
    OutputLayer.ForEach(n => n.InputSynapses.ForEach(s => s.Weight = s.WeightDelta));
    HiddenLayers.ForEach(l => l.ForEach(n => n.InputSynapses.ForEach(s => s.Weight = s.WeightDelta)));
    // Yield for one second before signalling the end of backpropagation.
    yield return(new WaitForSeconds(1));
    Debug.Log("End BP");
}
/// <summary>
/// Rebuilds the network topology (and optionally its weights) from a
/// serialized DNA string of the form
/// "inputs||outputs[||depth||hiddenPerLayer[||w0||w1||...]]".
/// </summary>
/// <param name="dna">Serialized network description, fields separated by "||".</param>
public void BuildFromDNA(string dna)
{
    string[] parts = dna.Split(new[] { "||" }, StringSplitOptions.None);
    // Parse with the invariant culture: DNA is a machine-generated format
    // and must round-trip regardless of the current thread's locale (CA1305).
    var culture = System.Globalization.CultureInfo.InvariantCulture;
    int inputCount = int.Parse(parts[0], culture);
    int outputCount = int.Parse(parts[1], culture);
    int depth = 0;
    int hiddenNeuronsPerLayer = 0;
    if (parts.Length > 2) { depth = int.Parse(parts[2], culture); }
    if (parts.Length > 3) { hiddenNeuronsPerLayer = int.Parse(parts[3], culture); }

    InputLayer = new NeuronsLayer(0, inputCount);
    InputLayer.BuildDendrites(1, 1);
    HiddenLayers = Enumerable.Range(1, depth)
        .Select(i => new NeuronsLayer(i, hiddenNeuronsPerLayer))
        .ToList();
    OutputLayer = new NeuronsLayer(1 + depth, outputCount);
    // The first hidden layer connects to the inputs; deeper hidden layers
    // connect to the previous hidden layer.
    HiddenLayers.ForEach(h => h.BuildDendrites(
        h == HiddenLayers.First() ? inputCount : hiddenNeuronsPerLayer, 0));
    // Fix: with no hidden layers (depth == 0) the output layer connects
    // directly to the inputs; the original wired it to a zero-width hidden
    // layer, producing neurons with no dendrites.
    OutputLayer.BuildDendrites(depth > 0 ? hiddenNeuronsPerLayer : inputCount, 0);

    // Optional trailing fields are the flattened dendrite weights: hidden
    // layers first (layer by layer, neuron by neuron), then the output layer.
    if (parts.Length > 4)
    {
        int weightCounter = 4;
        foreach (NeuronsLayer nl in HiddenLayers)
        {
            foreach (Neuron n in nl.Neurons)
            {
                foreach (Dendrite d in n.Dendrites)
                {
                    d.Weight = double.Parse(parts[weightCounter++], culture);
                }
            }
        }
        foreach (Neuron n in OutputLayer.Neurons)
        {
            foreach (Dendrite d in n.Dendrites)
            {
                d.Weight = double.Parse(parts[weightCounter++], culture);
            }
        }
    }

    AllLayers = new List<NeuronsLayer> { InputLayer };
    AllLayers.AddRange(HiddenLayers);
    AllLayers.Add(OutputLayer);
}
// Computes gradients (output layer first, then hidden layers back-to-front)
// and applies the weight updates.
private void BackPropagate(double[] targets)
{
    int index = 0;
    foreach (var neuron in OutputLayer)
    {
        neuron.CalculateGradient(targets[index++]);
    }

    // Reverse in place so gradients flow from the last hidden layer to the
    // first; restored to original order once weights are updated.
    HiddenLayers.Reverse();
    foreach (var layer in HiddenLayers)
    {
        layer.ForEach(neuron => neuron.CalculateGradient());
    }
    foreach (var layer in HiddenLayers)
    {
        layer.ForEach(neuron => neuron.UpdateWeights(LearningRate, Momentum));
    }
    HiddenLayers.Reverse();

    foreach (var neuron in OutputLayer)
    {
        neuron.UpdateWeights(LearningRate, Momentum);
    }
}
// Backpropagation pass: exit-layer gradients against the goal vector,
// hidden-layer gradients from last to first, then weight updates.
private void BackPropagation(params double[] goal)
{
    int index = 0;
    ExitLayer.Neurons.ForEach(neuron => neuron.CalculateGradient(goal[index++]));

    // Process hidden layers back-to-front; order restored afterwards.
    HiddenLayers.Reverse();
    foreach (var layer in HiddenLayers)
    {
        layer.Neurons.ForEach(neuron => neuron.CalculateGradient());
    }
    foreach (var layer in HiddenLayers)
    {
        layer.Neurons.ForEach(neuron => neuron.UpdateWeight(LearningRate, Momentum));
    }
    HiddenLayers.Reverse();

    ExitLayer.Neurons.ForEach(neuron => neuron.UpdateWeight(LearningRate, Momentum));
}
// Pushes one input vector through the network: assigns entry values, then
// evaluates hidden and exit neurons in order.
// Fix: the original mixed two loop counters (`j` bounded the loop while `i`
// did the indexing and side-effect increment); a single index is clearer and
// behaves identically.
private void ForwardPropagation(params double[] entryValue)
{
    for (int j = 0; j < EntryLayer.Neurons.Count; j++)
    {
        EntryLayer.Neurons[j].Value = entryValue[j];
    }
    HiddenLayers.ForEach(x => x.Neurons.ForEach(y => y.CalculateEntry()));
    ExitLayer.Neurons.ForEach(x => x.CalculateEntry());
}
/// <summary>
/// Backpropagation pass: output-layer gradients, hidden-layer gradients from
/// the last layer back to the first (without mutating the layer list), then
/// weight updates for hidden and output layers.
/// </summary>
private void BackPropagate(params double[] targets)
{
    int index = 0;
    OutputLayer.ForEach(neuron => neuron.CalculateGradient(targets[index++]));

    // Enumerate hidden layers in reverse via LINQ instead of List.Reverse(),
    // leaving HiddenLayers itself untouched.
    foreach (var layer in HiddenLayers.AsEnumerable<List<Neuron>>().Reverse())
    {
        foreach (var neuron in layer)
        {
            neuron.CalculateGradient();
        }
    }

    foreach (var layer in HiddenLayers)
    {
        foreach (var neuron in layer)
        {
            neuron.UpdateWeights(LearnRate, Momentum);
        }
    }
    OutputLayer.ForEach(neuron => neuron.UpdateWeights(LearnRate, Momentum));
}
// Forward pass with optional dropout in the hidden layers. Input assignment
// and hidden-layer evaluation run in parallel; output neurons are evaluated
// sequentially and always without dropout.
private void ForwardPropagate(bool dropOut, params double[] inputs)
{
    Parallel.ForEach(InputLayer, neuron => neuron.Value = inputs[neuron.Index]);
    foreach (var layer in HiddenLayers)
    {
        Parallel.ForEach(layer, neuron => neuron.CalculateValue(dropOut));
    }
    OutputLayer.ForEach(neuron => neuron.CalculateValue(false));
}
// Refreshes the neuron outputs of every hidden layer.
private void UpdateHiddenLayerOutput()
{
    foreach (var hiddenLayer in HiddenLayers)
    {
        hiddenLayer.UpdateNeuronsOutput();
    }
}
// Initializes every hidden layer with random values, one layer at a time.
private void InitHiddenLayersWithRandomValues()
{
    foreach (var hiddenLayer in HiddenLayers)
    {
        InitHiddenLayerWithRandomValues(hiddenLayer);
    }
}
/// <summary>
/// Runs a forward pass for the given inputs and returns the output-layer
/// activations. Hidden-layer neurons are evaluated in parallel using the
/// supplied options; output neurons are evaluated sequentially.
/// </summary>
public double[] PredictionFor(double[] inputs, ParallelOptions parallelOptions)
{
    InputLayer.SetInputLayerOutputs(inputs);
    foreach (var layer in HiddenLayers)
    {
        layer.Neurons.ParallelForEach(neuron => neuron.CalculateOutput(), parallelOptions);
    }
    return OutputLayer.Neurons.Select(neuron => neuron.CalculateOutput()).ToArray();
}
// Resets every neuron in the output layer and in all hidden layers.
public virtual void ResetNetwork()
{
    foreach (var neuron in OutputLayer)
    {
        neuron.ResetNeuron();
    }
    foreach (var layer in HiddenLayers)
    {
        foreach (var neuron in layer)
        {
            neuron.ResetNeuron();
        }
    }
}