/// <summary>
/// Trains by pure random search: repeatedly generates a fresh random weight set
/// and keeps the first one whose accumulated absolute error over the whole
/// training set drops below <paramref name="maxError"/>. Prints the error of
/// each candidate, then the final error and the winning weights.
/// </summary>
/// <param name="maxError">Error threshold at which the search stops. Note: if no
/// random weight set ever reaches it, this loops forever.</param>
public void LearnByRandom(double maxError)
{
    // FIX: removed unused local 'weightCount' (was computed from Weight.Count()
    // and never read).
    double GlobalError = double.MaxValue;
    while (GlobalError > maxError)
    {
        GlobalError = 0;
        GenerateWeight(); // discard previous weights, try a fresh random set
        for (int x = 0; x < Input.Count; x++)
        {
            NeuronLayers[0] = Input[x]; // load sample into the input layer
            GetError();                 // forward pass through the network
            GlobalError += AbsError(Output[x]);
        }
        Console.WriteLine(GlobalError); // progress: error of this candidate
    }
    Console.WriteLine(GlobalError); // final (accepted) error
    foreach (var item in Weight)
    {
        Console.WriteLine(item); // dump the winning weight set
    }
}
/// <summary>
/// Greedy knapsack: ranks items by price/weight ratio (via Val's comparer —
/// assumed to order best ratio first; confirm in Val.CompareTo), then takes
/// items in that order while they fit within the remaining capacity
/// (Size - GreaterWeight). Stores the chosen count in ListSize, accumulates
/// GreaterPrice/GreaterWeight, and prints the result.
/// </summary>
public void calculate()
{
    List<Val> answer = new List<Val>();
    Val[] wp = new Val[Weight.Count()];
    for (int i = 0; i < Weight.Count(); i++)
    {
        // Remember the original index alongside the value-per-weight ratio.
        wp[i] = new Val(i, Price[i] / Weight[i]);
    }
    Array.Sort(wp); // ordering defined by Val's IComparable implementation

    // BUG FIX: the cursor was declared *inside* the while loop, so it reset to 0
    // every iteration — the greedy kept re-adding wp[0] until it no longer fit,
    // and the i++ was dead code. Hoisted outside so we walk the sorted items.
    // Also added a bounds guard for the case where every item fits.
    int index = 0;
    bool fim = false;
    while (fim == false)
    {
        if (index < wp.Length && Weight[wp[index].position] < Size - GreaterWeight)
        {
            answer.Add(wp[index]);
            GreaterPrice += Price[wp[index].position];
            GreaterWeight += Weight[wp[index].position];
            index++;
        }
        else
        {
            // First item that does not fit (or items exhausted) ends the greedy.
            ListSize = answer.Count();
            fim = true;
        }
    }
    printValues();
}
/// <summary>
/// Trains the network with online (per-sample) back-propagation for a fixed
/// number of passes, then prints and returns the accumulated absolute error
/// over the training set. Gradients are written into a flat 'increment' array
/// mirroring the flat Weight list, filled back-to-front via weighIndex.
/// </summary>
/// <param name="Epoch">Pass counter; the do/while runs Epoch + 1 times.</param>
/// <param name="Lambda">Learning rate applied when subtracting the increments.</param>
/// <returns>Sum of AbsError over all training samples after training.</returns>
public double LearnByBackPropagation(int Epoch, double Lambda)
{
    GenerateWeight(); // start from a fresh random weight set
    double GlobalError = 0;
    do
    {
        for (int x = 0; x < Input.Count; x++)
        {
            NeuronLayers[0] = Input[x]; // feed sample into the input layer
            GetError();                 // forward pass (fills remaining layers)
            double[] Etotal = PowError(Output[x]); // NOTE(review): computed but never used below
            int weighIndex = Weight.Count() - 1;   // cursor over the flat weight list, walked backwards
            int Last = NeuronLayers.Count() - 1;   // index of the output layer
            double[] increment = new double[weighIndex + 1]; // one gradient slot per weight
            #region First Layer
            // Output layer deltas: (out - target) * out * (1 - out), i.e. the
            // derivative of squared error through a sigmoid-shaped activation
            // (assumes GetError uses a sigmoid — confirm against GetError).
            for (int i = NeuronLayers[Last].Count() - 1; i > -1; i--)
            {
                double curentWeight = 1; // bias input is the constant 1
                double err = NeuronLayers.Last()[i] - Output[x][i];
                double log = NeuronLayers.Last()[i] * (1 - NeuronLayers.Last()[i]);
                double total = err * log * curentWeight;
                increment[weighIndex--] = total; // gradient for the bias weight of output neuron i
                for (int j = NeuronLayers[Last - 1].Count() - 1; j > -1; j--)
                {
                    // NOTE(review): indexes the previous layer with 'i' (the output-neuron
                    // index) rather than the loop variable 'j' — looks like a bug; the
                    // gradient w.r.t. each incoming weight should scale by that weight's
                    // own source activation. Confirm before changing.
                    double curentWeight1 = NeuronLayers[NeuronLayers.Count() - 2][i];
                    double err1 = NeuronLayers.Last()[i] - Output[x][i];
                    double log1 = NeuronLayers.Last()[i] * (1 - NeuronLayers.Last()[i]);
                    double total1 = err1 * log1 * curentWeight1;
                    increment[weighIndex--] = total1;
                }
            }
            #endregion
            #region ErrorOutput
            // Back-propagated error for the last hidden layer: for each hidden
            // neuron w, sum over output neurons of delta_i * connecting weight.
            // NOTE(review): backIndex starts at weighIndex - 1 here but is reset
            // to weighIndex (no -1) at the end of each w-iteration — the first
            // hidden neuron uses a different base offset than the rest; verify
            // which one matches the weight layout.
            int backIndex = weighIndex - 1;
            List<double> sumsError = new List<double>();
            for (int w = 0; w < NeuronLayers[NeuronLayers.Count() - 2].Count(); w++)
            {
                List<double> er = new List<double>();
                for (int i = 0; i < Output[x].Count(); i++)
                {
                    double err = NeuronLayers.Last()[i] - Output[x][i];
                    double log = NeuronLayers.Last()[i] * (1 - NeuronLayers.Last()[i]);
                    double ww = (Weight[backIndex]);
                    double y = err * log * ww;
                    er.Add(y);
                    // Stride to the next output neuron's weight for this hidden
                    // neuron (+1 presumably skips the bias slot — confirm layout).
                    backIndex += Output[x].Count() + 1;
                }
                sumsError.Add(er.Sum());
                backIndex = weighIndex; // see NOTE above: differs from the initial -1 offset
            }
            #endregion
            #region Other Layers
            // Walk the remaining hidden layers from the back towards the input,
            // reusing the same delta * activationDerivative * sourceActivation form.
            for (int yy = NeuronLayers.Count() - 2; yy > 0; yy--)
            {
                for (int i = NeuronLayers[yy].Count() - 1; i > -1; i--)
                {
                    double curentWeight = 1; // bias input
                    double err = sumsError[i];
                    double log = NeuronLayers[yy][i] * (1 - NeuronLayers[yy][i]);
                    double total = err * log * curentWeight;
                    increment[weighIndex--] = total; // bias-weight gradient for neuron i of layer yy
                    for (int j = NeuronLayers[yy - 1].Count() - 1; j > -1; j--)
                    {
                        // Here the source activation IS indexed by j (unlike the
                        // First Layer region above).
                        double curentWeight1 = NeuronLayers[yy - 1][j];
                        double err1 = sumsError[i];
                        double log1 = NeuronLayers[yy][i] * (1 - NeuronLayers[yy][i]);
                        double total1 = err1 * log1 * curentWeight1;
                        increment[weighIndex--] = total1;
                    }
                }
                #region Error Hidden Layers
                // Propagate the summed error one layer further back.
                backIndex = weighIndex;
                List<double> NewSumError = new List<double>();
                for (int w = 0; w < NeuronLayers[yy - 1].Count(); w++)
                {
                    List<double> er = new List<double>();
                    // NOTE(review): 'k < NeuronLayers[yy].Count() - 1' skips the
                    // last neuron of layer yy — possibly treating it as a bias
                    // unit, possibly an off-by-one; confirm against the layer layout.
                    for (int k = 0; k < NeuronLayers[yy].Count() - 1; k++)
                    {
                        double y = (sumsError[k]) * (Weight[++backIndex]);
                        er.Add(y);
                    }
                    NewSumError.Add(er.Sum());
                }
                sumsError.Clear();
                sumsError.AddRange(NewSumError);
                #endregion
            }
            #endregion
            // Gradient-descent step: apply every accumulated increment at once.
            for (int i = 0; i < Weight.Count(); i++)
            {
                Weight[i] -= increment[i] * Lambda;
            }
        }
        // Optional per-epoch error logging, kept for debugging:
        //GlobalError = 0;
        //for (int x = 0; x < Input.Count; x++)
        //{
        //    NeuronLayers[0] = Input[x];
        //    GetError();
        //    GlobalError += AbsError(Output[x]);
        //}
        //Console.WriteLine("Epoch :{0} Global_Error:{1}", Epoch, GlobalError);
    } while (Epoch-- > 0); // post-decrement: runs Epoch + 1 passes in total
    // Final evaluation pass over the full training set.
    for (int x = 0; x < Input.Count; x++)
    {
        NeuronLayers[0] = Input[x];
        GetError();
        GlobalError += AbsError(Output[x]);
    }
    Console.WriteLine(" Global_Error:{0}", GlobalError);
    return (GlobalError);
}