/// <summary>
/// Runs NBN training and its test - this is the first version.
/// </summary>
/// <param name="trials">int - number of learning trials</param>
/// <returns>LearnResult with per-trial weights, SSE/RMSE histories and aggregate statistics</returns>
private LearnResult RunFirstVersion(int trials)
{
    // Back up the user-supplied settings: Train() adapts MU in place, so each
    // trial restores these values onto a fresh NeuralNetworkSettings instance.
    backupSettings = new NeuralNetworkSettings();
    backupSettings.MaxError = settings.MaxError;
    backupSettings.MaxIterations = settings.MaxIterations;
    backupSettings.MU = settings.MU;
    backupSettings.MUH = settings.MUH;
    backupSettings.MUL = settings.MUL;
    backupSettings.Scale = settings.Scale;

    Trials = trials;

    LearnResult result = new LearnResult();
    result.Filename = Filename;
    if (MatLabCompareDataFolder.Length > 0)
    {
        // MatLab-comparison mode: take the first *.dat file from the compare folder.
        result.Filename = string.Format("{0}\\{1}.dat",
            MatLabCompareDataFolder,
            Path.GetFileNameWithoutExtension(Directory.GetFiles(MatLabCompareDataFolder, "*.dat")[0]));
    }

    if (!loadInputData(Filename))
    {
        updateErrorNBN("Dane nie zostały wczytane."); // "Data was not loaded."
        return (result);
    }
    if (OnDebug != null)
    {
        debug("Data loaded from file: " + Filename);
    }

    // Layer-size vector: number of inputs, `Handle` hidden entries of size 1, one output.
    var tmp = new System.Collections.Generic.List<int>();
    tmp.Add(inputLearn.Cols);
    for (int i = 0; i < Handle; i++)
    {
        tmp.Add(1);
    }
    tmp.Add(1);

    var vh = new VectorHorizontal(tmp.Count);
    for (int i = 0; i < vh.Length; i++)
    {
        vh[i] = tmp[i];
    }

    switch (NBN_Topography)
    {
        case 0:
            topo = Topography.Generate(TopographyType.BMLP, vh);
            break;
        case 1:
            topo = Topography.Generate(TopographyType.MLP, vh);
            break;
    }

    // FIX: guard BEFORE dereferencing topo. The original read topo.Data[0] and
    // topo.ToString() first, which made this null check unreachable.
    if (topo == null)
    {
        updateErrorNBN("Topologia sieci nie została utworzona."); // "Network topology was not created."
        return (result);
    }
    result.Topo = topo.Data[0];
    if (OnDebug != null)
    {
        debug(topo.ToString());
    }

    info = this.checkInputs(ref inputLearn, ref outputLearn, ref topo, out indexes); // here are set indexes
    result.TopoIndex = indexes.Data[0];
    result.Info = info;
    if (OnDebug != null)
    {
        debug(indexes.ToString());
        debug(info.ToString());
    }

    // Same activation everywhere except the output neuron, which is set to 0 (linear).
    Activation act = new Activation(info.nn);
    act.FillWithNumber(NBN_Activation);
    act.setValue(info.nn - 1, 0);
    result.ActivationFunction = act.Data[0];

    Gain gain = new Gain(info.nn);
    gain.FillWithNumber(NBN_Gain);
    result.GainValue = gain.Data[0];
    result.Settings = this.settings;

    for (trial = 0; trial < trials; trial++)
    {
        // Initial weights: fixed file in MatLab-comparison mode, random otherwise.
        // (Declared unassigned - the original allocated a Weights object that was
        // immediately overwritten in both branches.)
        Weights initialWeights;
        if (MatLabCompareDataFolder.Length > 0)
        {
            initialWeights = MatrixMB.Load(string.Format("{0}\\poczatkowe_wagi_proba_{1}.txt",
                MatLabCompareDataFolder, trial + 1)).ToWeights();
        }
        else
        {
            initialWeights = Weights.Generate(info.nw);
        }
        if (IsResearchMode)
        {
            string initialWeightsFile = String.Format("{0}\\{1}{2}_initial_weights.dat",
                _reasearch_folder, trial, Path.GetFileNameWithoutExtension(result.Filename));
            initialWeights.Store(initialWeightsFile);
        }
        initialWeights.Name = "Initial";
        if (OnDebug != null)
        {
            debug(String.Format("\r\nTrial {0} from {1}\r\n", trial + 1, trials));
            debug(initialWeights.ToString());
        }

        // Restore pristine settings for this trial (Train mutates MU during damping).
        settings = NeuralNetworkSettings.Default();
        settings.MaxError = backupSettings.MaxError;
        settings.MaxIterations = backupSettings.MaxIterations;
        settings.MU = backupSettings.MU;
        settings.MUH = backupSettings.MUH;
        settings.MUL = backupSettings.MUL;
        settings.Scale = backupSettings.Scale;

        I = MatrixMB.Eye(info.nw);

        tic(); // learn time measure start
        var tr = Train(ref this.settings, ref this.info, ref this.inputLearn, ref this.outputLearn,
            ref this.topo, initialWeights, ref act, ref gain, ref indexes);
        String LearnExecutionTime = toc(); // learn time measure stop
        LearnTimeList = time.ElapsedTicks; // learn time measure save

        result.Add(tr.weights.Data[0], SSE.ToDoubleArray(), RMSE.ToDoubleArray());
        // NOTE(review): RMSE[RMSE.Count] suggests a 1-based custom indexer - confirm
        // against the RMSE collection type before "fixing" to Count - 1.
        result.LearnRMSE = (double)RMSE[RMSE.Count];
        LearnRmseList = LastRMSE;
        if (OnDebug != null)
        {
            debug(tr.weights.ToString());
            debug("\r\nLearn execution time: " + LearnExecutionTime + "(hours:minutes:seconds:miliseconds)\r\n");
            debug("\r\nLearn SSE: " + tr.sse.ToString() + "\r\n");
            debug("\r\nLearn RMSE: " + result.LearnRMSE.ToString() + "\r\n");
        }
        updateError(result.LearnRMSE);

        // Test pass: same network/weights, test patterns only (np overridden).
        NetworkInfo infoTest = info.Copy();
        infoTest.np = inputTest.Rows;
        tic();
        error.CalculateError(ref infoTest, ref inputTest, ref outputTest, ref topo, tr.weights, ref act, ref gain, ref indexes);
        var TestExecutionTime = toc();
        TestTimeList = time.ElapsedTicks;
        result.TestRMSE = Math.Sqrt(error.Error / infoTest.np);
        TestRmseList = result.TestRMSE;
        result.TestingRmseList.Add(result.TestRMSE);
        if (OnDebug != null)
        {
            debug("\r\nTest execution time: " + TestExecutionTime + "(hours:minutes:seconds:miliseconds)\r\n");
            debug("\r\nTest SSE: " + error.Error.ToString() + "\r\n");
            debug("\r\nTest RMSE: " + result.TestRMSE.ToString() + "\r\n");
        }

        // A trial is a success when its final recorded SSE drops below Threshold.
        //if (result.LearnRMSE < Threshold) IsTrainOK++;
        if (result.SSE[trial][result.SSE[trial].Length - 1] < Threshold)
        {
            IsTrainOK++;
        }
    }

    result.LearnRMSE = AverageLearnRMSE;
    result.TestRMSE = AverageTestRMSE;
    result.setStatisticsData(LearnRMSE, TestRMSE, LearnTime, TestTime, Trials);
    result.SuccessRate = (double)IsTrainOK / Trials;

    if (IsResearchMode) // save research
    {
        try
        {
            string filename = extractionFolder + "\\research result.pdf";
            PDFGenerate data = new PDFGenerate();
            data.Filename = filename;
            data.Result = result;
            data.ChartFilename = GeneratePlot(result.RMSE, Path.GetFileNameWithoutExtension(result.Filename));
            HistoryPDF pdf = new HistoryPDF(data.Result, data.ChartFilename, true);
            pdf.Save(data.Filename);
        }
        catch
        {
            // Best-effort report: a PDF/plot failure must not discard the learn result.
        }
    }
    return (result);
}
/// <summary>
/// Runs NBN training and its test - this is the first version.
/// </summary>
/// <param name="trials">int - number of learning trials</param>
/// <returns>LearnResult with per-trial weights, SSE/RMSE histories and aggregate statistics</returns>
private LearnResult RunFirstVersion(int trials)
{
    // Back up the user-supplied settings: Train() adapts MU in place, so each
    // trial restores these values onto a fresh NeuralNetworkSettings instance.
    backupSettings = new NeuralNetworkSettings();
    backupSettings.MaxError = settings.MaxError;
    backupSettings.MaxIterations = settings.MaxIterations;
    backupSettings.MU = settings.MU;
    backupSettings.MUH = settings.MUH;
    backupSettings.MUL = settings.MUL;
    backupSettings.Scale = settings.Scale;

    Trials = trials;

    LearnResult result = new LearnResult();
    result.Filename = Filename;
    if (MatLabCompareDataFolder.Length > 0)
    {
        // MatLab-comparison mode: take the first *.dat file from the compare folder.
        result.Filename = string.Format("{0}\\{1}.dat",
            MatLabCompareDataFolder,
            Path.GetFileNameWithoutExtension(Directory.GetFiles(MatLabCompareDataFolder, "*.dat")[0]));
    }

    if (!loadInputData(Filename))
    {
        updateErrorNBN("Dane nie zostały wczytane."); // "Data was not loaded."
        return result;
    }
    if (OnDebug != null)
    {
        debug("Data loaded from file: " + Filename);
    }

    // Layer-size vector: number of inputs, `Handle` hidden entries of size 1, one output.
    var tmp = new System.Collections.Generic.List<int>();
    tmp.Add(inputLearn.Cols);
    for (int i = 0; i < Handle; i++)
    {
        tmp.Add(1);
    }
    tmp.Add(1);

    var vh = new VectorHorizontal(tmp.Count);
    for (int i = 0; i < vh.Length; i++)
    {
        vh[i] = tmp[i];
    }

    switch (NBN_Topography)
    {
        case 0:
            topo = Topography.Generate(TopographyType.BMLP, vh);
            break;
        case 1:
            topo = Topography.Generate(TopographyType.MLP, vh);
            break;
    }

    // FIX: guard BEFORE dereferencing topo. The original read topo.Data[0] and
    // topo.ToString() first, which made this null check unreachable.
    if (topo == null)
    {
        updateErrorNBN("Topologia sieci nie została utworzona."); // "Network topology was not created."
        return result;
    }
    result.Topo = topo.Data[0];
    if (OnDebug != null)
    {
        debug(topo.ToString());
    }

    info = this.checkInputs(ref inputLearn, ref outputLearn, ref topo, out indexes); // here are set indexes
    result.TopoIndex = indexes.Data[0];
    result.Info = info;
    if (OnDebug != null)
    {
        debug(indexes.ToString());
        debug(info.ToString());
    }

    // Same activation everywhere except the output neuron, which is set to 0 (linear).
    Activation act = new Activation(info.nn);
    act.FillWithNumber(NBN_Activation);
    act.setValue(info.nn - 1, 0);
    result.ActivationFunction = act.Data[0];

    Gain gain = new Gain(info.nn);
    gain.FillWithNumber(NBN_Gain);
    result.GainValue = gain.Data[0];
    result.Settings = this.settings;

    for (trial = 0; trial < trials; trial++)
    {
        // Initial weights: fixed file in MatLab-comparison mode, random otherwise.
        // (Declared unassigned - the original allocated a Weights object that was
        // immediately overwritten in both branches.)
        Weights initialWeights;
        if (MatLabCompareDataFolder.Length > 0)
        {
            initialWeights = MatrixMB.Load(string.Format("{0}\\poczatkowe_wagi_proba_{1}.txt",
                MatLabCompareDataFolder, trial + 1)).ToWeights();
        }
        else
        {
            initialWeights = Weights.Generate(info.nw);
        }
        if (IsResearchMode)
        {
            string initialWeightsFile = String.Format("{0}\\{1}{2}_initial_weights.dat",
                _reasearch_folder, trial, Path.GetFileNameWithoutExtension(result.Filename));
            initialWeights.Store(initialWeightsFile);
        }
        initialWeights.Name = "Initial";
        if (OnDebug != null)
        {
            debug(String.Format("\r\nTrial {0} from {1}\r\n", trial + 1, trials));
            debug(initialWeights.ToString());
        }

        // Restore pristine settings for this trial (Train mutates MU during damping).
        settings = NeuralNetworkSettings.Default();
        settings.MaxError = backupSettings.MaxError;
        settings.MaxIterations = backupSettings.MaxIterations;
        settings.MU = backupSettings.MU;
        settings.MUH = backupSettings.MUH;
        settings.MUL = backupSettings.MUL;
        settings.Scale = backupSettings.Scale;

        I = MatrixMB.Eye(info.nw);

        tic(); // learn time measure start
        var tr = Train(ref this.settings, ref this.info, ref this.inputLearn, ref this.outputLearn,
            ref this.topo, initialWeights, ref act, ref gain, ref indexes);
        String LearnExecutionTime = toc(); // learn time measure stop
        LearnTimeList = time.ElapsedTicks; // learn time measure save

        result.Add(tr.weights.Data[0], SSE.ToDoubleArray(), RMSE.ToDoubleArray());
        // NOTE(review): RMSE[RMSE.Count] suggests a 1-based custom indexer - confirm
        // against the RMSE collection type before "fixing" to Count - 1.
        result.LearnRMSE = (double)RMSE[RMSE.Count];
        LearnRmseList = LastRMSE;
        if (OnDebug != null)
        {
            debug(tr.weights.ToString());
            debug("\r\nLearn execution time: " + LearnExecutionTime + "(hours:minutes:seconds:miliseconds)\r\n");
            debug("\r\nLearn SSE: " + tr.sse.ToString() + "\r\n");
            debug("\r\nLearn RMSE: " + result.LearnRMSE.ToString() + "\r\n");
        }
        updateError(result.LearnRMSE);

        // Test pass: same network/weights, test patterns only (np overridden).
        NetworkInfo infoTest = info.Copy();
        infoTest.np = inputTest.Rows;
        tic();
        error.CalculateError(ref infoTest, ref inputTest, ref outputTest, ref topo, tr.weights, ref act, ref gain, ref indexes);
        var TestExecutionTime = toc();
        TestTimeList = time.ElapsedTicks;
        result.TestRMSE = Math.Sqrt(error.Error / infoTest.np);
        TestRmseList = result.TestRMSE;
        result.TestingRmseList.Add(result.TestRMSE);
        if (OnDebug != null)
        {
            debug("\r\nTest execution time: " + TestExecutionTime + "(hours:minutes:seconds:miliseconds)\r\n");
            debug("\r\nTest SSE: " + error.Error.ToString() + "\r\n");
            debug("\r\nTest RMSE: " + result.TestRMSE.ToString() + "\r\n");
        }

        // A trial is a success when its final recorded SSE drops below Threshold.
        //if (result.LearnRMSE < Threshold) IsTrainOK++;
        if (result.SSE[trial][result.SSE[trial].Length - 1] < Threshold)
        {
            IsTrainOK++;
        }
    }

    result.LearnRMSE = AverageLearnRMSE;
    result.TestRMSE = AverageTestRMSE;
    result.setStatisticsData(LearnRMSE, TestRMSE, LearnTime, TestTime, Trials);
    result.SuccessRate = (double)IsTrainOK / Trials;

    if (IsResearchMode) // save research
    {
        try
        {
            string filename = extractionFolder + "\\research result.pdf";
            PDFGenerate data = new PDFGenerate();
            data.Filename = filename;
            data.Result = result;
            data.ChartFilename = GeneratePlot(result.RMSE, Path.GetFileNameWithoutExtension(result.Filename));
            HistoryPDF pdf = new HistoryPDF(data.Result, data.ChartFilename, true);
            pdf.Save(data.Filename);
        }
        catch
        {
            // Best-effort report: a PDF/plot failure must not discard the learn result.
        }
    }
    return result;
}
/// <summary>
/// Trains the neural network using damped (Levenberg-Marquardt style) weight updates:
/// each iteration solves (H + MU*I) * d = g and adapts MU until the step reduces the SSE.
/// </summary>
/// <param name="setting">NeuralNetworkSettings - learning parameters; MU is adapted in place</param>
/// <param name="info">NetworkInfo</param>
/// <param name="inp">Input - learning patterns</param>
/// <param name="dout">Output - desired outputs</param>
/// <param name="topo">Topography</param>
/// <param name="initialWeights">Weights - starting weight vector</param>
/// <param name="act">Activation</param>
/// <param name="gain">Gain</param>
/// <param name="iw">Index</param>
/// <returns>TrainResult with final weights, iteration count and last SSE</returns>
public TrainResult Train(ref NeuralNetworkSettings setting, ref NetworkInfo info, ref Input inp,
    ref Output dout, ref Topography topo, Weights initialWeights, ref Activation act,
    ref Gain gain, ref Index iw)
{
    TrainResult result = new TrainResult();
    result.weights = new Weights(initialWeights.Length);
    result.iterations = 0;
    result.sse = 0;
    try
    {
        if (OnDebug != null)
        {
            debug(setting.ToString());
            debug(act.ToString());
            debug(gain.ToString());
        }

        result.weights = initialWeights.Backup();
        error.CalculateError(ref info, ref inp, ref dout, ref topo, result.weights, ref act, ref gain, ref iw);
        if (OnDebug != null)
        {
            debug("\r\nFirst error value: " + error.Error.ToString() + "\r\n");
        }

        SSE.Clear();
        RMSE.Clear();
        SSE[0] = result.sse = error.Error;
        hessians.Clear();
        var hessian = new Hessian(ref info);
        // FIX: removed dead code - the original made unused deep copies here
        // (Input ii = inp.Copy().ToInput(); Output oo = dout.Copy().ToOutput();)
        // that were never referenced again.

        for (result.iterations = 1; result.iterations < setting.MaxIterations; result.iterations++)
        {
            hessian.Compute(ref info, ref inp, ref dout, ref topo, result.weights, ref act, ref gain, ref iw);
            if (OnDebug != null)
            {
                debug(hessian.ToString());
            }
            hessians.Add(hessian.HessianMat);

            Weights ww_backup = result.weights.Backup();
            // Damping loop: retry the step (at most 30 times) with increasing MU
            // until the new SSE is no worse than the previous one.
            for (int jw = 0; jw < 30; jw++)
            {
                var diff = (hessian.HessianMat + (I * setting.MU)).SolveEquatation(hessian.GradientMat).Transposed;
                if (OnDebug != null)
                {
                    debug("\r\nOdejmuję");
                    debug(diff.MatrixToString());
                }
                result.weights = ww_backup - diff.ToWeights();
                result.weights.Name = "Weights nr " + jw.ToString();
                if (OnDebug != null)
                {
                    bool areSame = result.weights.IsEqual(ww_backup);
                    debug("\r\nWeights are same as previously backed up");
                    debug(result.weights.ToString());
                }

                SSE[result.iterations] = result.sse = error.CalculateError(ref info, ref inp, ref dout, ref topo, result.weights, ref act, ref gain, ref iw);
                if (OnDebug != null)
                {
                    debug("\r\nSSE[" + result.iterations.ToString() + "] = " + error.Error.ToString());
                }

                if (SSE.CurrentSSE() <= SSE.PreviousSSE(result.iterations))
                {
                    // Step accepted: relax the damping factor (bounded below by MUL).
                    if (setting.MU > setting.MUL)
                    {
                        setting.MU /= setting.Scale;
                    }
                    break;
                }
                // Step rejected: increase damping (bounded above by MUH) and retry
                // from the backed-up weights.
                if (setting.MU < setting.MUH)
                {
                    setting.MU *= setting.Scale;
                }
            }

            double rmse = Math.Sqrt((SSE.CurrentSSE()) / inp.Rows);
            RMSE[result.iterations] = rmse;
            updateChart(result.iterations, rmse);

            // Stop when the absolute error target is reached.
            if ((double)SSE[result.iterations] < setting.MaxError)
            {
                break;
            }
            if (OnDebug != null)
            {
                debug("Błąd: " + rmse.ToString()); // "Error: "
            }
            // Stop when the relative SSE improvement falls below the desired error.
            if ((SSE.PreviousSSE(result.iterations) - ((double)SSE[result.iterations]))
                / SSE.PreviousSSE(result.iterations) < NetworkError.DesiredError) // 0.000000000000001
            {
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // Wrap any failure in the project-specific exception; ex.Message is appended
        // to the Polish "network learning error" prefix.
        throw new NeuralNetworkError("Błąd uczenia sieci. " + ex.Message, ex);
    }
    return (result);
} // trainer end
/// <summary>
/// Trains the neural network using damped (Levenberg-Marquardt style) weight updates:
/// each iteration solves (H + MU*I) * d = g and adapts MU until the step reduces the SSE.
/// </summary>
/// <param name="setting">NeuralNetworkSettings - learning parameters; MU is adapted in place</param>
/// <param name="info">NetworkInfo</param>
/// <param name="inp">Input - learning patterns</param>
/// <param name="dout">Output - desired outputs</param>
/// <param name="topo">Topography</param>
/// <param name="initialWeights">Weights - starting weight vector</param>
/// <param name="act">Activation</param>
/// <param name="gain">Gain</param>
/// <param name="iw">Index</param>
/// <returns>TrainResult with final weights, iteration count and last SSE</returns>
public TrainResult Train(ref NeuralNetworkSettings setting, ref NetworkInfo info, ref Input inp,
    ref Output dout, ref Topography topo, Weights initialWeights, ref Activation act,
    ref Gain gain, ref Index iw)
{
    TrainResult result = new TrainResult();
    result.weights = new Weights(initialWeights.Length);
    result.iterations = 0;
    result.sse = 0;
    try
    {
        if (OnDebug != null)
        {
            debug(setting.ToString());
            debug(act.ToString());
            debug(gain.ToString());
        }

        result.weights = initialWeights.Backup();
        error.CalculateError(ref info, ref inp, ref dout, ref topo, result.weights, ref act, ref gain, ref iw);
        if (OnDebug != null)
        {
            debug("\r\nFirst error value: " + error.Error.ToString() + "\r\n");
        }

        SSE.Clear();
        RMSE.Clear();
        SSE[0] = result.sse = error.Error;
        hessians.Clear();
        var hessian = new Hessian(ref info);
        // FIX: removed dead code - the original made unused deep copies here
        // (Input ii = inp.Copy().ToInput(); Output oo = dout.Copy().ToOutput();)
        // that were never referenced again.

        for (result.iterations = 1; result.iterations < setting.MaxIterations; result.iterations++)
        {
            hessian.Compute(ref info, ref inp, ref dout, ref topo, result.weights, ref act, ref gain, ref iw);
            if (OnDebug != null)
            {
                debug(hessian.ToString());
            }
            hessians.Add(hessian.HessianMat);

            Weights ww_backup = result.weights.Backup();
            // Damping loop: retry the step (at most 30 times) with increasing MU
            // until the new SSE is no worse than the previous one.
            for (int jw = 0; jw < 30; jw++)
            {
                var diff = (hessian.HessianMat + (I * setting.MU)).SolveEquatation(hessian.GradientMat).Transposed;
                if (OnDebug != null)
                {
                    debug("\r\nOdejmuję");
                    debug(diff.MatrixToString());
                }
                result.weights = ww_backup - diff.ToWeights();
                result.weights.Name = "Weights nr " + jw.ToString();
                if (OnDebug != null)
                {
                    bool areSame = result.weights.IsEqual(ww_backup);
                    debug("\r\nWeights are same as previously backed up");
                    debug(result.weights.ToString());
                }

                SSE[result.iterations] = result.sse = error.CalculateError(ref info, ref inp, ref dout, ref topo, result.weights, ref act, ref gain, ref iw);
                if (OnDebug != null)
                {
                    debug("\r\nSSE[" + result.iterations.ToString() + "] = " + error.Error.ToString());
                }

                if (SSE.CurrentSSE() <= SSE.PreviousSSE(result.iterations))
                {
                    // Step accepted: relax the damping factor (bounded below by MUL).
                    if (setting.MU > setting.MUL)
                    {
                        setting.MU /= setting.Scale;
                    }
                    break;
                }
                // Step rejected: increase damping (bounded above by MUH) and retry
                // from the backed-up weights.
                if (setting.MU < setting.MUH)
                {
                    setting.MU *= setting.Scale;
                }
            }

            double rmse = Math.Sqrt((SSE.CurrentSSE()) / inp.Rows);
            RMSE[result.iterations] = rmse;
            updateChart(result.iterations, rmse);

            // Stop when the absolute error target is reached.
            if ((double)SSE[result.iterations] < setting.MaxError)
            {
                break;
            }
            if (OnDebug != null)
            {
                debug("Błąd: " + rmse.ToString()); // "Error: "
            }
            // Stop when the relative SSE improvement falls below the desired error.
            if ((SSE.PreviousSSE(result.iterations) - ((double)SSE[result.iterations]))
                / SSE.PreviousSSE(result.iterations) < NetworkError.DesiredError) // 0.000000000000001
            {
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // Wrap any failure in the project-specific exception; ex.Message is appended
        // to the Polish "network learning error" prefix.
        throw new NeuralNetworkError("Błąd uczenia sieci. " + ex.Message, ex);
    }
    return result;
}