/// <summary>
/// Trains a 5-feature / 3-class classifier on a small hand-built exam-result
/// data set and asserts that the four distinct input patterns are classified
/// to their majority label.
/// </summary>
public void MC_ExamResult_Test()
{
    // 5 input features, 3 output classes.
    MultiLogisticClassifier regr = new MultiLogisticClassifier(5, 3);

    // Local helper: add 'count' copies of the same (x, y) sample.
    // Replaces 23 near-identical AddSample calls and makes the class
    // distribution of the training set explicit.
    void AddSamples(double[] x, double[] y, int count)
    {
        for (int n = 0; n < count; n++)
        {
            regr.AddSample(x, y);
        }
    }

    // Same 23 samples, in the same order, as the original hand-written list.
    AddSamples(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 }, 4);
    AddSamples(new double[] { 0, 0, 1, 0, 1 }, new double[] { 0, 1, 0 }, 1);
    AddSamples(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 }, 8);
    AddSamples(new double[] { 1, 0, 0, 1, 0 }, new double[] { 1, 0, 0 }, 3);
    AddSamples(new double[] { 1, 0, 0, 1, 0 }, new double[] { 0, 0, 1 }, 1); // deliberate noise
    AddSamples(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 0, 1 }, 5);
    AddSamples(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 1, 0 }, 1); // deliberate noise

    int times = 1000;
    regr.Alpha = 3;      // learning rate
    regr.Lambda = 0.01;  // regularization strength
    var results = regr.Train(times);

    // Dump cost and the full theta matrix every 100 iterations for inspection.
    for (int i = 0; i < times; i += 100)
    {
        Debug.Print(results[i].Cost.ToString());
        for (int t = 0; t < results[i].Theta.GetLength(0); t++)
        {
            string theta_string = string.Empty;
            for (int s = 0; s < results[i].Theta.GetLength(1); s++)
            {
                theta_string += results[i].Theta[t, s].ToString() + ", ";
            }
            Debug.Print("\t" + theta_string);
        }
    }

    // Each distinct input pattern must map to its majority label.
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 0, 1, 0, 1, 0 }), new double[] { 1, 0, 0 }));
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 0, 0, 1, 0, 1 }), new double[] { 0, 1, 0 }));
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 1, 0, 0, 1, 0 }), new double[] { 1, 0, 0 }));
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 1, 0, 0, 0, 0 }), new double[] { 0, 0, 1 }));
}
/// <summary>
/// Trains a classifier on matrices loaded from F:\ fixture files, evaluates
/// it on a held-out test set, prints the precision, and saves the final theta.
/// NOTE(review): requires x.txt / y.txt / x_test.txt / y_test.txt on F:\.
/// </summary>
public void TrainTest()
{
    // Load training data from disk.
    List<double[]> x = FeatureExtension.LoadMatrix("F:\\x.txt");
    List<double[]> y = FeatureExtension.LoadMatrix("F:\\y.txt");
    MultiLogisticClassifier regr = new MultiLogisticClassifier(FeatureGenerator.FeatureCount, 6, 3, 0);

    int m = 1000; //x.Count(); — cap on training samples for speed
    for (int i = 0; i < m; i++)
    {
        regr.AddSample(x[i], y[i]);
    }

    int times = 2000;
    var results = regr.Train(times);

    // Dump cost and theta every 100 iterations for inspection.
    for (int i = 0; i < times; i += 100)
    {
        Debug.Print(results[i].Cost.ToString());
        for (int t = 0; t < results[i].Theta.GetLength(0); t++)
        {
            string theta_string = string.Empty;
            for (int s = 0; s < results[i].Theta.GetLength(1); s++)
            {
                theta_string += results[i].Theta[t, s].ToString() + ", ";
            }
            Debug.Print("\t" + theta_string);
        }
    }

    List<double[]> x_test = FeatureExtension.LoadMatrix("F:\\x_test.txt");
    List<double[]> y_test = FeatureExtension.LoadMatrix("F:\\y_test.txt");
    // BUG FIX: the original read "int m_test = 100; x_test.Count();" — the
    // stray Count() call was discarded; it is now a comment like the one on m.
    int m_test = 100; //x_test.Count(); — cap on test samples

    int correct_test = 0;
    int error_compare = 0;
    for (int i = 0; i < m_test; i++)
    {
        double[] p1 = regr.Predict(x_test[i]);
        // BUG FIX: compare against the TEST labels (y_test[i]), not the
        // training labels (y[i]) as the original did.
        if (MLMath.AreEqual(p1, y_test[i]))
        {
            // BUG FIX: count every correct prediction unconditionally. The
            // original only counted while error_compare < 20, so precision
            // was silently corrupted once 20 errors had been logged.
            correct_test += 1;
        }
        else if (error_compare < 20) // log at most 20 mispredictions
        {
            double[] p2 = regr.PredictByPercentage(x_test[i]);
            Debug.Print(y_test[i].ToLine());
            Debug.Print(p1.ToLine());
            Debug.Print(p2.ToLine());
            error_compare++;
        }
    }

    double precise = correct_test * 1.0 / m_test;
    Debug.Print(precise.ToString());

    // Persist the final learned parameters.
    var theta = results.Last().Theta;
    FeatureExtension.SaveMatrix(theta, "F:\\theta.txt");
}