public void Demo_008_Neural_Network_4()
{
    // Demo 008: 2-class classification with a 2-layer network (Sigmoid hidden layer +
    // softmax final layer). Only the final layer is trained here, which demonstrates
    // how error backpropagation can settle into a local minimum of the loss.
    Console.WriteLine("Neural Networkのデモンストレーションです");
    Console.WriteLine("4回目です。私も飽きてきましたが、最後です");
    Console.WriteLine("今回は多クラス分類の問題を扱います。\n\n");

    // Training data: input x is 1-dimensional, teacher t is a 2-row one-hot vector.
    List<double[,]> list_x = new List<double[,]>();
    List<double[,]> list_t = new List<double[,]>();
    for (int j = 0; j < 6; j++)
    {
        double[,] x = new double[1, 1];
        x[0, 0] = j * 1.0;
        list_x.Add(x);

        double[,] t = new double[2, 1];
        if (j < 2 || 3 < j)
        {
            t[0, 0] = 1;
            t[1, 0] = 0;
        }
        else
        {
            t[0, 0] = 0;
            t[1, 0] = 1;
        }
        list_t.Add(t);
    }

    Console.WriteLine("今回の入力値xと、答えtです");
    Console.WriteLine("x < 2 , 3 < x のとき、t[0,0]=1 , t[1,0]=0になります。それ以外の場合、逆になります。");
    Console.Write("x\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t[0,0]\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t[1,0]\t");
    foreach (double[,] t in list_t) { Console.Write(t[1, 0] + "\t"); }
    Console.WriteLine("\n\n");

    Console.WriteLine("Demo007の結果から、この分布は2層以上のNeural Networkでのみ解くことができます");
    Console.WriteLine("第1層の計数行列はwは2行1列の行列とします");
    Console.WriteLine("活性化関数はSigmoid関数とします");

    // Layer 1 (hidden): w is 2x1, b is 2x1, Sigmoid activation.
    double[,] w_1 = new double[2, 1];
    w_1[0, 0] = 10;
    w_1[1, 0] = -10;
    Console.WriteLine("第1層の計数行列 w");
    this.Show_Matrix_Element(w_1);
    double[,] b_1 = new double[2, 1];
    b_1[0, 0] = -20;
    b_1[1, 0] = 20;
    Console.WriteLine("第1層のバイアスベクトル b");
    this.Show_Matrix_Element(b_1);

    // The first layer is a hidden layer, so it uses a separate class.
    Hidden_Layer hd_1 = new Hidden_Layer();
    hd_1.Preset_1_4th_Set_w(w_1);
    hd_1.Preset_2_4th_Set_b(b_1);
    hd_1.Preset_3_4th_Set_Hyper_Parameter(0.1, 0, 0, 0);
    hd_1.Preset_4_4th_Set_activation_Function(new Sigmoid_IFunction());
    Console.WriteLine("\n");

    Console.WriteLine("第2層の計数行列はwは2行2列の行列とします");
    Console.WriteLine("活性化関数はSoftMax関数で固定です");

    // Layer 2 (final): w is 2x2, b is 2x1; the activation is fixed (SoftMax) by the class.
    double[,] w_2 = new double[2, 2];
    w_2[0, 0] = 3;
    w_2[0, 1] = -3;
    w_2[1, 0] = 1;
    w_2[1, 1] = -2;
    Console.WriteLine("第2層の計数行列 w");
    this.Show_Matrix_Element(w_2);
    double[,] b_2 = new double[2, 1];
    b_2[0, 0] = 2;
    // BUG FIX: the original assigned b_2[0, 0] twice ("b_2[0, 0] = 2; b_2[0, 0] = -1;"),
    // overwriting the first value and leaving b_2[1, 0] at its default 0.
    // The intended bias vector is (2, -1), matching every other 2x1 setup in this file.
    b_2[1, 0] = -1;
    Console.WriteLine("第2層のバイアスベクトル b");
    this.Show_Matrix_Element(b_2);
    Console.WriteLine("\n\n");

    Multiclass_Classification_Final_Layer mcfl = new Multiclass_Classification_Final_Layer();
    mcfl.Preset_1_3rd_Set_w(w_2);
    mcfl.Preset_2_3rd_Set_b(b_2);
    mcfl.Preset_3_3rd_Set_Hyper_Parameter(0.01, 0, 0, 0);
    Console.WriteLine("\n\n");

    Console.WriteLine("学習前のNeural Networkの出力を確認します。");
    Console.WriteLine("これまで途中結果の表示が大切だと考えてきたのですが、画面がごちゃつくだけなので最終層の出力だけ表示します。");

    // Forward pass over the whole data set to show the untrained outputs.
    List<double[,]> list_f_wx_plus_b_2 = new List<double[,]>();
    for (int j = 0; j < list_x.Count; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j]);
        mcfl.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        list_f_wx_plus_b_2.Add(mcfl.Get_f_wx_plus_b());
    }
    Console.Write("x\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t[0,0]\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t[1,0]\t");
    foreach (double[,] t in list_t) { Console.Write(t[1, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("y[0,0]\t");
    foreach (double[,] t in list_f_wx_plus_b_2) { Console.Write(t[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine("\t");
    Console.Write("y[1,0]\t");
    foreach (double[,] t in list_f_wx_plus_b_2) { Console.Write(t[1, 0].ToString("G3") + "\t"); }
    Console.WriteLine("\n\n");

    Console.WriteLine("xの値によらず、すべて[0,0]のクラスであると認識したようです。");
    // MESSAGE FIX: the loop below runs 1,001 epochs, not the 100,000 the old text claimed.
    Console.WriteLine("1,001 epoch 学習してみます。");
    Console.WriteLine("\n\n");

    int epoch = 1000 + 1;
    double[] loss = new double[list_x.Count];
    int row_num = 0;
    double maximum_loss = 0;
    for (int j = 0; j < epoch; j++)
    {
        // Evaluate the loss of every sample for this epoch.
        for (int k = 0; k < list_x.Count; k++)
        {
            // Forward propagation
            hd_1.Step_1_3rd_Forward_Propagation(list_x[k]);
            mcfl.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
            mcfl.Step_2_3rd_Calculate_Target_Function_and_Delta(list_t[k]);
            loss[k] = mcfl.Get_target_function();
        }

        // Train only on the (x, t) pair with the largest loss.
        row_num = 0;
        maximum_loss = loss[0];
        for (int k = 1; k < loss.Length; k++)
        {
            if (maximum_loss < loss[k])
            {
                row_num = k;
                maximum_loss = loss[k];
            }
        }

        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[row_num]);
        mcfl.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        // Backpropagation (the hidden layer is deliberately left untrained in this demo).
        mcfl.Step_2_3rd_Calculate_Target_Function_and_Delta(list_t[row_num]);
        //hd_1.Step_2_3rd_Calculate_Delta(mcfl.Get_w(), mcfl.Get_delta());
        // Parameter update
        mcfl.Step_3_3rd_Update();
        //hd_1.Step_3_3rd_Update();
    }

    // Forward pass again to show the trained outputs.
    list_f_wx_plus_b_2 = new List<double[,]>();
    for (int j = 0; j < list_x.Count; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j]);
        mcfl.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        list_f_wx_plus_b_2.Add(mcfl.Get_f_wx_plus_b());
    }
    Console.Write("x\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t[0,0]\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t[1,0]\t");
    foreach (double[,] t in list_t) { Console.Write(t[1, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("y[0,0]\t");
    foreach (double[,] t in list_f_wx_plus_b_2) { Console.Write(t[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine("\t");
    Console.Write("y[1,0]\t");
    foreach (double[,] t in list_f_wx_plus_b_2) { Console.Write(t[1, 0].ToString("G3") + "\t"); }
    Console.WriteLine("\n\n");

    Console.WriteLine("y[0,0] , y[1,0]がともに0.5付近になりました。");
    Console.WriteLine("明らかに、この答えは私たちが求めているものではありません。");
    Console.WriteLine("この解は、誤差逆伝搬法の課題の1つである局所解です。");
    Console.WriteLine("局所解は、計数行列w、バイアスベクトルbの取りうる範囲全体では損失関数が最小ではないが、その値近傍では損失関数が最小になる解のことです。");
    Console.WriteLine("誤差逆伝搬法は損失関数が小さくなる方向にパラメータを更新する手法のため、局所的な最小値に引っ掛かりやすいです。");
    Console.WriteLine("モーメンタム法などの対策法はありますが、効果的なのはデータの前処理でしょう。");
    Console.WriteLine("今回の場合は、2クラス分類なので、Demo007のようにSigmoid関数の回帰分析問題として扱った方が、正解に近づけたでしょう。");
}
public void Demo_011_Compare_NN_SVM_LR()
{
    // Demo 011: compare a 2-layer neural network, a support vector machine, and
    // linear regression on the same 1-D binary classification data, and report
    // the wall-clock training time of each method.
    Console.WriteLine("ニューラルネットワーク、サポートベクトルマシン、線形回帰を比較します。");
    Console.WriteLine("WriteLineはPCの処理能力を使うので、裏ですべて計算してから、結果をお見せします。");

    // Training data: x = 0..8, t = +1 for 3 <= x <= 5, otherwise -1.
    List<double[,]> list_x = new List<double[,]>();
    List<double[,]> list_t = new List<double[,]>();
    for (int j = 0; j < 9; j++)
    {
        double[,] x = new double[1, 1];
        x[0, 0] = j;
        list_x.Add(x);

        double[,] t = new double[1, 1];
        if (j < 3 || 5 < j)
        {
            t[0, 0] = -1;
        }
        else
        {
            t[0, 0] = 1;
        }
        list_t.Add(t);
    }

    // ----- Neural network (2 layers) -----
    // Layer 1 (hidden): w is 2x1, b is 2x1, Sigmoid activation.
    double[,] w_1 = new double[2, 1];
    w_1[0, 0] = 10;
    w_1[1, 0] = -10;
    double[,] b_1 = new double[2, 1];
    b_1[0, 0] = -35;
    b_1[1, 0] = 55;
    // The first layer is a hidden layer, so it uses a separate class.
    Hidden_Layer hd_1 = new Hidden_Layer();
    hd_1.Preset_1_4th_Set_w(w_1);
    hd_1.Preset_2_4th_Set_b(b_1);
    hd_1.Preset_3_4th_Set_Hyper_Parameter(0.01, 0, 0, 0);
    hd_1.Preset_4_4th_Set_activation_Function(new Sigmoid_IFunction());

    // Layer 2 (final): w is 1x2, b is 1x1, hyperbolic tangent activation.
    double[,] w_2 = new double[1, 2];
    w_2[0, 0] = 20;
    w_2[0, 1] = 20;
    double[,] b_2 = new double[1, 1];
    b_2[0, 0] = -30;
    Regression_Final_Layer rfl_2 = new Regression_Final_Layer();
    rfl_2.Preset_1_4th_Set_w(w_2);
    rfl_2.Preset_2_4th_Set_b(b_2);
    rfl_2.Preset_3_4th_Set_Hyper_Parameter(0.001, 0, 0, 0);
    rfl_2.Preset_4_4th_Set_activation_Function(new Hyperbolic_Tangent_IFunction());

    int epoch = 1000;

    // Stopwatch is used instead of subtracting DateTime.Now values: DateTime.Now has
    // coarse resolution and is not monotonic, so it is unreliable for elapsed-time
    // measurement. Fully qualified so no extra using-directive is required.
    System.Diagnostics.Stopwatch sw_NN = System.Diagnostics.Stopwatch.StartNew();
    for (int j = 0; j < list_x.Count * epoch; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j % list_x.Count]);
        rfl_2.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        // Backpropagation (the hidden layer is deliberately left untrained in this demo).
        rfl_2.Step_2_3rd_Calculate_Target_Function_and_Delta(list_t[j % list_t.Count]);
        //hd_1.Step_2_3rd_Calculate_Delta(rfl_2.Get_w(), rfl_2.Get_delta());
        // Parameter update
        rfl_2.Step_3_3rd_Update();
        //hd_1.Step_3_3rd_Update();
    }
    sw_NN.Stop();
    TimeSpan span_NN = sw_NN.Elapsed;

    // Trained NN output for each input.
    double[,] y_NN = new double[list_x.Count, 1];
    for (int j = 0; j < 9; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j]);
        rfl_2.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        y_NN[j, 0] = rfl_2.Get_f_wx_plus_b()[0, 0];
    }

    // ----- Design matrix shared by the SVM and linear regression -----
    double[,] X = new double[9, 1];
    double[,] t_vec = new double[9, 1];
    for (int j = 0; j < 9; j++)
    {
        X[j, 0] = j;
        if (j < 3 || 5 < j)
        {
            t_vec[j, 0] = -1;
        }
        else
        {
            t_vec[j, 0] = 1;
        }
    }

    // ----- SVM training -----
    System.Diagnostics.Stopwatch sw_SVM = System.Diagnostics.Stopwatch.StartNew();
    double[,] variance_covariance = Design_Matrix.Variance_Covariance_Matrix(X);
    // Learn the coefficient vector A.
    double[,] Coefficient_A = Support_Vector_Machine.Learned_Coefficient_A(t_vec, X, new Power_of_10_IKernel(), variance_covariance);
    sw_SVM.Stop();
    TimeSpan span_SVM = sw_SVM.Elapsed;
    double[,] classified = Support_Vector_Machine.Classification_Design_Matrix(t_vec, X, new Power_of_10_IKernel(), variance_covariance, Coefficient_A, X);

    // ----- Linear regression: Gaussian basis functions centred at 1, 4 and 7 -----
    double[,] phi_X = new double[9, 3];
    for (int j = 0; j < 9; j++)
    {
        phi_X[j, 0] = Math.Exp(-(X[j, 0] - 1) * (X[j, 0] - 1) / 2.0) / Math.Sqrt(2 * Math.PI);
        phi_X[j, 1] = Math.Exp(-(X[j, 0] - 4) * (X[j, 0] - 4) / 2.0) / Math.Sqrt(2 * Math.PI);
        phi_X[j, 2] = Math.Exp(-(X[j, 0] - 7) * (X[j, 0] - 7) / 2.0) / Math.Sqrt(2 * Math.PI);
    }
    System.Diagnostics.Stopwatch sw_LR = System.Diagnostics.Stopwatch.StartNew();
    double[,] w = Liner_Regression.Learning_parameter_w_column_vector(phi_X, t_vec);
    sw_LR.Stop();
    TimeSpan span_LR = sw_LR.Elapsed;
    double[,] y_LR = Liner_Regression.Regression_Design_Matrix(phi_X, w);

    // ----- Result table -----
    Console.WriteLine("入力x" + "\t" + "答えt" + "\t" + "NN" + "\t" + "SVM" + "\t" + "LR");
    for (int j = 0; j < 9; j++)
    {
        Console.Write(X[j, 0] + "\t");
        Console.Write(t_vec[j, 0] + "\t");
        Console.Write(y_NN[j, 0].ToString("G2") + "\t");
        Console.Write(classified[j, 0].ToString("G2") + "\t");
        Console.Write(y_LR[j, 0].ToString("G2") + "\t");
        Console.WriteLine(" ");
    }
    Console.WriteLine(" ");
    Console.WriteLine("NNの計算時間\t\t" + span_NN.Minutes + "分" + span_NN.Seconds + "秒");
    Console.WriteLine("SVMの計算時間\t\t" + span_SVM.Minutes + "分" + span_SVM.Seconds + "秒");
    Console.WriteLine("LRの計算時間\t\t" + span_LR.Minutes + "分" + span_LR.Seconds + "秒");
    Console.WriteLine("\n\n" + "NNの精度が良くないですね。");
    Console.WriteLine("計数行列wの初期値や、学習回数、ハイパーパラメータなどを調整したのですが、これが限界でした。");
}
public void Demo_007_Neural_Network_3()
{
    // Demo 007: a data distribution that a 1-layer network cannot learn
    // (it is not linearly separable), followed by a 2-layer network that can.
    Console.WriteLine("Neural Networkのデモンストレーションです");
    Console.WriteLine("今回は1層では学習できない例を紹介します\n\n");

    // Training data: input x and teacher t are both 1-dimensional for simplicity.
    List<double[,]> list_x = new List<double[,]>();
    List<double[,]> list_t = new List<double[,]>();
    for (int j = 0; j < 6; j++)
    {
        double[,] x = new double[1, 1];
        x[0, 0] = j * 1.0;
        list_x.Add(x);

        double[,] t = new double[1, 1];
        if (j < 2 || 3 < j)
        {
            t[0, 0] = 1;
        }
        else
        {
            t[0, 0] = 0;
        }
        list_t.Add(t);
    }

    Console.WriteLine("今回の入力値xと、答えtです");
    Console.WriteLine("x < 2 , 3 < x のとき、答えが1になります。それ以外の場合、0です。");
    Console.Write("x\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine("\t");
    Console.Write("t\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\n\n");

    // ----- 1-layer network -----
    Console.WriteLine("まずは1層のNeural Networkで試してみます");
    Console.WriteLine("計数行列Wとバイアスベクトルbを設定します。");
    double[,] W_f = new double[1, 1];
    W_f[0, 0] = 3;
    double[,] b_f = new double[1, 1];
    b_f[0, 0] = 2;
    Console.WriteLine("W[0,0] = " + W_f[0, 0] + "\t" + "b[0,0] = " + b_f[0, 0] + "\n\n");
    // TYPO FIX: "Netowork" -> "Network".
    Console.WriteLine("Neural Networkを設定しました。今回の活性化関数はSigmoid関数とします");
    // MESSAGE FIX: the learning rate set below is 0.3, not 0.1 as the old text claimed.
    Console.WriteLine("ハイパーパラメータの値は0.3とします。\n");
    Regression_Final_Layer rfl = new Regression_Final_Layer();
    rfl.Preset_1_4th_Set_w(W_f);
    rfl.Preset_2_4th_Set_b(b_f);
    rfl.Preset_3_4th_Set_Hyper_Parameter(0.3, 0, 0.01, 0);
    rfl.Preset_4_4th_Set_activation_Function(new Sigmoid_IFunction());

    Console.WriteLine("学習前のNeural Networkの出力を確認します。");
    List<double[,]> list_wx_plus_b = new List<double[,]>();
    List<double[,]> list_f_wx_plus_b = new List<double[,]>();
    List<double[,]> list_delta = new List<double[,]>();
    for (int j = 0; j < list_x.Count; j++)
    {
        rfl.Step_1_3rd_Forward_Propagation(list_x[j]);
        list_wx_plus_b.Add(rfl.Get_wx_plus_b());
        list_f_wx_plus_b.Add(rfl.Get_f_wx_plus_b());
    }
    Console.WriteLine("W " + rfl.Get_w()[0, 0] + "\t" + "b " + rfl.Get_b()[0, 0]);
    Console.Write("x\t\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine(" ");
    Console.Write("wx + b\t\t");
    foreach (double[,] x in list_wx_plus_b) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("f(wx + b)\t");
    foreach (double[,] x in list_f_wx_plus_b) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("t\t\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\n\n");

    Console.WriteLine("100,000 epoch 学習しました。");
    int epoch = 100000;
    for (int j = 0; j < list_x.Count * epoch; j++)
    {
        rfl.Step_1_3rd_Forward_Propagation(list_x[j % list_x.Count]);
        rfl.Step_2_3rd_Calculate_Target_Function_and_Delta(list_t[j % list_x.Count]);
        rfl.Step_3_3rd_Update();
    }

    // Re-evaluate after training.
    list_wx_plus_b = new List<double[,]>();
    list_f_wx_plus_b = new List<double[,]>();
    list_delta = new List<double[,]>();
    for (int j = 0; j < list_x.Count; j++)
    {
        rfl.Step_1_3rd_Forward_Propagation(list_x[j]);
        list_wx_plus_b.Add(rfl.Get_wx_plus_b());
        list_f_wx_plus_b.Add(rfl.Get_f_wx_plus_b());
        rfl.Step_2_3rd_Calculate_Target_Function_and_Delta(list_t[j]);
        list_delta.Add(rfl.Get_delta());
    }
    Console.WriteLine("W " + rfl.Get_w()[0, 0] + "\t" + "b " + rfl.Get_b()[0, 0]);
    Console.Write("x\t\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine(" ");
    Console.Write("wx + b\t\t");
    foreach (double[,] x in list_wx_plus_b) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("f(wx + b)\t");
    foreach (double[,] x in list_f_wx_plus_b) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("t\t\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine(" ");
    Console.WriteLine("\n\n\n");

    // ----- 2-layer network -----
    Console.WriteLine("次は、2層のNeural Networkで試してみます");
    Console.WriteLine("第1層の計数行列はwは2行1列の行列とします");
    Console.WriteLine("活性化関数はSwish関数とします");
    double[,] w_1 = new double[2, 1];
    w_1[0, 0] = 3;
    w_1[1, 0] = -3;
    Console.WriteLine("第1層の計数行列 w");
    this.Show_Matrix_Element(w_1);
    double[,] b_1 = new double[2, 1];
    b_1[0, 0] = 2;
    b_1[1, 0] = -2;
    Console.WriteLine("第1層のバイアスベクトル b");
    this.Show_Matrix_Element(b_1);
    // The first layer is a hidden layer, so it uses a separate class.
    Hidden_Layer hd_1 = new Hidden_Layer();
    hd_1.Preset_1_4th_Set_w(w_1);
    hd_1.Preset_2_4th_Set_b(b_1);
    hd_1.Preset_3_4th_Set_Hyper_Parameter(0.15, 0, 0, 0);
    hd_1.Preset_4_4th_Set_activation_Function(new Swish_IFunction());
    Console.WriteLine("\n");
    Console.WriteLine("第2層の計数行列はwは1行2列の行列とします");
    Console.WriteLine("活性化関数は、第2層はSigmoid関数とします");
    double[,] w_2 = new double[1, 2];
    w_2[0, 0] = 3;
    w_2[0, 1] = -3;
    Console.WriteLine("第2層の計数行列 w");
    this.Show_Matrix_Element(w_2);
    double[,] b_2 = new double[1, 1];
    b_2[0, 0] = 2;
    Console.WriteLine("第2層のバイアスベクトル b");
    this.Show_Matrix_Element(b_2);
    Console.WriteLine("\n\n");
    Regression_Final_Layer rfl_2 = new Regression_Final_Layer();
    rfl_2.Preset_1_4th_Set_w(w_2);
    rfl_2.Preset_2_4th_Set_b(b_2);
    rfl_2.Preset_3_4th_Set_Hyper_Parameter(0.15, 0, 0, 0);
    rfl_2.Preset_4_4th_Set_activation_Function(new Sigmoid_IFunction());
    Console.WriteLine("\n\n");

    Console.WriteLine("学習前のNeural Networkの出力を確認します。");
    List<double[,]> list_wx_plus_b_1 = new List<double[,]>();
    List<double[,]> list_f_wx_plus_b_1 = new List<double[,]>();
    List<double[,]> list_wx_plus_b_2 = new List<double[,]>();
    List<double[,]> list_f_wx_plus_b_2 = new List<double[,]>();
    for (int j = 0; j < list_x.Count; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j]);
        rfl_2.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        list_wx_plus_b_1.Add(hd_1.Get_wx_plus_b());
        list_f_wx_plus_b_1.Add(hd_1.Get_f_wx_plus_b());
        list_wx_plus_b_2.Add(rfl_2.Get_wx_plus_b());
        list_f_wx_plus_b_2.Add(rfl_2.Get_f_wx_plus_b());
    }
    Console.WriteLine("第1層のW ");
    this.Show_Matrix_Element(hd_1.Get_w());
    Console.WriteLine("第1層のb ");
    this.Show_Matrix_Element(hd_1.Get_b());
    Console.Write("x\t\t\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine(" ");
    Console.Write("第1層の(wx + b)\t\t");
    foreach (double[,] x in list_wx_plus_b_1) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.Write("\n \t\t\t");
    foreach (double[,] x in list_wx_plus_b_1) { Console.Write(x[1, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("第1層のf(wx + b)\t");
    foreach (double[,] x in list_f_wx_plus_b_1) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.Write("\n \t\t\t");
    foreach (double[,] x in list_f_wx_plus_b_1) { Console.Write(x[1, 0].ToString("G3") + "\t"); }
    Console.WriteLine("\n");
    Console.WriteLine("第2層のW ");
    this.Show_Matrix_Element(rfl_2.Get_w());
    Console.WriteLine("第2層のb ");
    this.Show_Matrix_Element(rfl_2.Get_b());
    Console.Write("第2層の(wx + b)\t\t");
    foreach (double[,] x in list_wx_plus_b_2) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("第2層のf(wx + b)\t");
    foreach (double[,] x in list_f_wx_plus_b_2) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("t\t\t\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\n\n");

    Console.WriteLine("100,000 epoch 学習しました。");
    epoch = 100000;
    for (int j = 0; j < list_x.Count * epoch; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j % list_x.Count]);
        rfl_2.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        // Backpropagation (both layers are trained here)
        rfl_2.Step_2_3rd_Calculate_Target_Function_and_Delta(list_t[j % list_x.Count]);
        hd_1.Step_2_3rd_Calculate_Delta(rfl_2.Get_w(), rfl_2.Get_delta());
        // Parameter update
        rfl_2.Step_3_3rd_Update();
        hd_1.Step_3_3rd_Update();
    }

    // Re-evaluate after training.
    list_wx_plus_b_1 = new List<double[,]>();
    list_f_wx_plus_b_1 = new List<double[,]>();
    list_wx_plus_b_2 = new List<double[,]>();
    list_f_wx_plus_b_2 = new List<double[,]>();
    for (int j = 0; j < list_x.Count; j++)
    {
        // Forward propagation
        hd_1.Step_1_3rd_Forward_Propagation(list_x[j]);
        rfl_2.Step_1_3rd_Forward_Propagation(hd_1.Get_f_wx_plus_b());
        list_wx_plus_b_1.Add(hd_1.Get_wx_plus_b());
        list_f_wx_plus_b_1.Add(hd_1.Get_f_wx_plus_b());
        list_wx_plus_b_2.Add(rfl_2.Get_wx_plus_b());
        list_f_wx_plus_b_2.Add(rfl_2.Get_f_wx_plus_b());
    }
    Console.WriteLine("第1層のW ");
    this.Show_Matrix_Element(hd_1.Get_w());
    Console.WriteLine("第1層のb ");
    this.Show_Matrix_Element(hd_1.Get_b());
    Console.Write("x\t\t\t");
    foreach (double[,] x in list_x) { Console.Write(x[0, 0] + "\t"); }
    Console.WriteLine(" ");
    Console.Write("第1層の(wx + b)\t\t");
    foreach (double[,] x in list_wx_plus_b_1) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.Write("\n \t\t\t");
    foreach (double[,] x in list_wx_plus_b_1) { Console.Write(x[1, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("第1層のf(wx + b)\t");
    foreach (double[,] x in list_f_wx_plus_b_1) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.Write("\n \t\t\t");
    foreach (double[,] x in list_f_wx_plus_b_1) { Console.Write(x[1, 0].ToString("G3") + "\t"); }
    Console.WriteLine("\n");
    Console.WriteLine("第2層のW ");
    this.Show_Matrix_Element(rfl_2.Get_w());
    Console.WriteLine("第2層のb ");
    this.Show_Matrix_Element(rfl_2.Get_b());
    Console.Write("第2層の(wx + b)\t\t");
    foreach (double[,] x in list_wx_plus_b_2) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("第2層のf(wx + b)\t");
    foreach (double[,] x in list_f_wx_plus_b_2) { Console.Write(x[0, 0].ToString("G3") + "\t"); }
    Console.WriteLine(" ");
    Console.Write("t\t\t\t");
    foreach (double[,] t in list_t) { Console.Write(t[0, 0] + "\t"); }
    Console.WriteLine("\n\n");
}