/// <summary>
/// Computes the regularized logistic-regression cost and the parameter update
/// for the current Theta over the stored training set. Operates on the fields
/// x, y, Theta, Alpha and Lambda — it takes no parameters.
/// </summary>
/// <returns>Item1 - cost, Item2 - grad (already multiplied by -Alpha, i.e. a
/// ready-to-add descent step; Train() adds it directly to Theta)</returns>
public Tuple<double, double[]> CostFunction()
{
    int m = y.Length;       // number of training samples
    int n = Theta.Length;   // number of parameters (features + bias)

    // h = sigmoid(x * Theta); element-wise logs feed the cross-entropy cost.
    double[] one_minus_y = MLMath.ItemCalculate(1, y, (a, b) => a - b);
    double[] h = H();
    double[] log_h = MLMath.ItemCalculate(h, i => Math.Log(i));
    double[] one_minus_h = MLMath.ItemCalculate(1, h, (a, b) => a - b);
    double[] log_1_minus_h = MLMath.ItemCalculate(one_minus_h, i => Math.Log(i));
    // Cross-entropy: -(1/m) * sum( y*log(h) + (1-y)*log(1-h) ).
    double cost = (1.0 / m) * (-MLMath.ItemMultiplyAndSum(y, log_h) - MLMath.ItemMultiplyAndSum(one_minus_y, log_1_minus_h));

    //Regularization — the bias term Theta[0] is excluded by zeroing it out.
    double[] theta_reg = new double[n];
    theta_reg[0] = 0;
    for (int i = 1; i < n; i++)
    {
        theta_reg[i] = Theta[i];
    }
    double reg = Lambda / (2 * m) * (MLMath.ItemMultiplyAndSum(theta_reg, theta_reg));
    cost += reg;

    //Gradient — NOTE(review): the returned vector is -Alpha * dJ/dTheta, i.e.
    // the gradient-descent STEP, not the raw gradient; renaming would break
    // Train(), which does Theta = Theta + grad.
    double[] grad = new double[n];
    for (int i = 0; i < n; i++)
    {
        // Feature column i across all samples; assumes GetVector(x, 1, i)
        // slices along dimension 1 — TODO confirm against MLMath.GetVector.
        double[] x_i_vector = MLMath.GetVector(x, 1, i);
        grad[i] = -Alpha * ((1.0 / m) * MLMath.ItemMultiplyAndSum(x_i_vector, MLMath.Minus(h, y)) + Lambda / m * theta_reg[i]);
    }
    return(new Tuple<double, double[]>(cost, grad));
}
/// <summary>
/// Runs gradient descent for the requested number of iterations, rebuilding
/// the design matrix from the collected samples and re-randomizing Theta first.
/// </summary>
/// <param name="times">Number of gradient-descent iterations to run.</param>
/// <returns>The cost of each iteration (with a snapshot of Theta).</returns>
public List<BinaryClassificationTrainResult> Train(int times)
{
    var results = new List<BinaryClassificationTrainResult>();

    sampleCount = samples.Count;
    x = new double[sampleCount, featureCount_plus_1];
    y = new double[sampleCount];
    Theta = MLMath.GetRandomVector(featureCount_plus_1, 0, 1);

    // Build the design matrix: column 0 is the bias term (always 1),
    // the remaining columns are the sample features.
    for (int row = 0; row < sampleCount; row++)
    {
        x[row, 0] = 1;
        for (int col = 1; col < featureCount_plus_1; col++)
        {
            x[row, col] = samples[row].X[col - 1];
        }
        y[row] = samples[row].Y;
    }

    for (int iteration = 0; iteration < times; iteration++)
    {
        Tuple<double, double[]> costFunction = CostFunction();
        // CostFunction returns a -Alpha-scaled step, so a plain add descends.
        Theta = MLMath.Add(Theta, costFunction.Item2);
        results.Add(new BinaryClassificationTrainResult
        {
            Cost = costFunction.Item1,
            Theta = Theta,
        });
    }

    return results;
}
/// <summary>
/// Creates a logistic classifier for the given number of input features,
/// reserving one extra bias column and randomly initializing Theta.
/// </summary>
/// <param name="featureNumber">Number of input features (excluding bias).</param>
/// <param name="alpha">Learning rate.</param>
/// <param name="lambda">Regularization strength.</param>
public LogisticClassifier(int featureNumber, double alpha = 0.1, double lambda = 0)
{
    Alpha = alpha;
    Lambda = lambda;
    featureCount_plus_1 = featureNumber + 1; // +1 for the bias term
    Theta = MLMath.GetRandomVector(featureCount_plus_1, 0, 1);
}
/// <summary>
/// Trains a 3-class multinomial logistic classifier (5 features, 3 classes)
/// on a small hand-built data set — including a couple of deliberately
/// noisy labels — and checks it predicts the dominant class for each
/// distinct input pattern.
/// </summary>
public void MC_ExamResult_Test()
{
    MultiLogisticClassifier regr = new MultiLogisticClassifier(5, 3);
    // Pattern A: {0,1,0,1,0} -> class 0 (12 samples total).
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    // Pattern B: {0,0,1,0,1} -> class 1 (single sample).
    regr.AddSample(new double[] { 0, 0, 1, 0, 1 }, new double[] { 0, 1, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 0, 0 });
    // Pattern C: {1,0,0,1,0} -> mostly class 0, with one noisy class-2 label.
    regr.AddSample(new double[] { 1, 0, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 1, 0, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 1, 0, 0, 1, 0 }, new double[] { 1, 0, 0 });
    regr.AddSample(new double[] { 1, 0, 0, 1, 0 }, new double[] { 0, 0, 1 });
    // Pattern D: {1,0,0,0,0} -> mostly class 2, with one noisy class-1 label.
    regr.AddSample(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 0, 1 });
    regr.AddSample(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 0, 1 });
    regr.AddSample(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 0, 1 });
    regr.AddSample(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 0, 1 });
    regr.AddSample(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 0, 1 });
    regr.AddSample(new double[] { 1, 0, 0, 0, 0 }, new double[] { 0, 1, 0 });
    int times = 1000;
    // Aggressive learning rate plus mild regularization for this tiny set.
    regr.Alpha = 3;
    regr.Lambda = 0.01;
    var results = regr.Train(times);
    // Log the cost and Theta every 100 iterations to eyeball convergence.
    for (int i = 0; i < times; i++)
    {
        if (i % 100 != 0)
        {
            continue;
        }
        Debug.Print(results[i].Cost.ToString());
        for (int t = 0; t < results[i].Theta.GetLength(0); t++)
        {
            string theta_string = string.Empty;
            for (int s = 0; s < results[i].Theta.GetLength(1); s++)
            {
                theta_string += results[i].Theta[t, s].ToString() + ", ";
            }
            Debug.Print("\t" + theta_string);
        }
    }
    // Each distinct pattern should map to its majority label.
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 0, 1, 0, 1, 0 }), new double[] { 1, 0, 0 }));
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 0, 0, 1, 0, 1 }), new double[] { 0, 1, 0 }));
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 1, 0, 0, 1, 0 }), new double[] { 1, 0, 0 }));
    Assert.IsTrue(MLMath.AreEqual(regr.Predict(new double[] { 1, 0, 0, 0, 0 }), new double[] { 0, 0, 1 }));
}
/// <summary>
/// Sanity-checks that GetRandomVector yields varying values rather than a
/// constant vector, by spot-checking a few index pairs for inequality.
/// </summary>
public void GetRandomVectorTest()
{
    double[] vector = MLMath.GetRandomVector(10, 0, 1);
    for (int i = 0; i < vector.Length; i++)
    {
        Debug.Print(vector[i].ToString());
    }
    Assert.AreNotEqual(vector[0], vector[1]);
    Assert.AreNotEqual(vector[2], vector[3]);
    Assert.AreNotEqual(vector[3], vector[1]);
    Assert.AreNotEqual(vector[5], vector[1]);
}
/// <summary>
/// Evaluates the hypothesis sigmoid(x · Theta) for every stored sample row.
/// </summary>
/// <returns>One predicted probability per row of the design matrix x.</returns>
public double[] H()
{
    int sampleTotal = x.GetLength(0);
    double[] z = new double[sampleTotal];
    for (int row = 0; row < sampleTotal; row++)
    {
        double[] sampleFeatures = MLMath.GetVector(x, 0, row);
        z[row] = MLMath.ItemMultiplyAndSum(Theta, sampleFeatures);
    }
    return MLMath.ItemCalculate(z, value => MLMath.Sigmoid(value));
}
/// <summary>
/// Builds the randomly initialized weight matrices for a feed-forward network:
/// one matrix per hidden layer plus a final single-output layer. Every matrix
/// has one extra input column for the bias term.
/// </summary>
/// <param name="hiddenLayers">Neuron count of each hidden layer, in order.</param>
/// <param name="featureCount">Number of input features (excluding bias).</param>
/// <returns>hiddenLayers.Length + 1 weight matrices.</returns>
private static Matrix[] GetThetaList(int[] hiddenLayers, int featureCount)
{
    int layerTotal = hiddenLayers.Length + 1;
    var thetaList = new Matrix[layerTotal];

    // First hidden layer connects directly to the bias-augmented inputs.
    thetaList[0] = MLMath.GetRandomMatrix(hiddenLayers[0], featureCount + 1, 0, 1);

    // Each subsequent hidden layer connects to the previous one (again bias-augmented).
    for (int layer = 1; layer < hiddenLayers.Length; layer++)
    {
        thetaList[layer] = MLMath.GetRandomMatrix(hiddenLayers[layer], hiddenLayers[layer - 1] + 1, 0, 1);
    }

    // Output layer: a single neuron fed by the last hidden layer.
    thetaList[layerTotal - 1] = MLMath.GetRandomMatrix(1, hiddenLayers.Last() + 1, 0, 1);

    return thetaList;
}
/// <summary>
/// Debug variant of filter(): applies scale/add up front, publishes the
/// intermediate value via param.debug(), then applies the mode-specific
/// post-processing.
/// NOTE(review): unlike filter(), FLOAT_TO_BOOL here thresholds the already
/// scaled value, and FLOAT_TO_BOOL_INVERT / TRUE_IF_EQUALS_AS_INT fall
/// through to the default raw pass-through branch — confirm this asymmetry
/// with filter() is intended.
/// </summary>
private MLNumericParam debugFilter(DebugableFloat param_)
{
    DebugableFloat param = param_;
    // Pre-scale applied once for every mode handled below.
    param = param * scale + add;
    param.debug();
    switch (type)
    {
    //DEBUG
    case FilterType.PASS_AS_IS:
    default:
        // Returns the ORIGINAL (unscaled) input, matching filter()'s pass-through.
        return((float)param_);

    case FilterType.FLOAT_TO_BOOL:
        // Thresholds the SCALED value; filter() tests param.Bool on the raw input.
        param = Mathf.Abs((float)param) > 0.001f ? 1f : 0f;
        break;

    case FilterType.SCALE_ADD:
        // Scale/add already applied above; nothing more to do.
        break;

    case FilterType.SCALE_ADD_MOD:
        param = MLMath.fmod(param, mod);
        break;

    case FilterType.SCALE_ADD_CLAMP:
        param = Mathf.Clamp(param, clampRange.min, clampRange.max);
        break;

    case FilterType.SCALE_ADD_MOD_ROUND:
        param = Mathf.Round(MLMath.fmod(param, mod));
        break;

    case FilterType.SCALE_ADD_MOD_FLOOR:
        param = Mathf.Floor(MLMath.fmod(param, mod));
        break;

    case FilterType.SCALE_ADD_MOD_CEIL:
        param = Mathf.Ceil(MLMath.fmod(param, mod));
        break;

    case FilterType.SCALE_ADD_MOD_OFFSET:
        param = MLMath.fmod(param, mod) + offset;
        break;
    //DEBUG
    }
    return((float)param);
}
/// <summary>
/// Plays the configured audio cue when the movement destination is
/// (approximately) the start position (0) or the end position (1).
/// Fix: each branch previously guarded on the OTHER clip's name than the one
/// it played — so a configured cue was silently skipped whenever only one of
/// the two clip names was set, and an empty clip name could be passed to
/// AudioManager when only the other was set. Each guard now checks the clip
/// it actually plays.
/// </summary>
/// <param name="destination">Normalized target position, expected near 0 or 1.</param>
private void playAudioFor(float destination)
{
    if (MLMath.SomewhatCloseValues(0f, destination))
    {
        if (!string.IsNullOrEmpty(reachedStartAudio))
        {
            AudioManager.Instance.play(reachedStartAudio);
        }
    }
    else if (MLMath.SomewhatCloseValues(1f, destination))
    {
        if (!string.IsNullOrEmpty(reachedEndAudio))
        {
            AudioManager.Instance.play(reachedEndAudio);
        }
    }
}
/// <summary>
/// Transforms a numeric parameter according to the configured FilterType.
/// When debug is set, delegates to debugFilter() (which logs intermediates).
/// NOTE(review): the float results rely on an implicit float -> MLNumericParam
/// conversion, and (float)param on the debug path on the reverse conversion —
/// behavior depends on those operators defined elsewhere.
/// </summary>
/// <param name="param">The raw input value.</param>
/// <returns>The filtered value.</returns>
public MLNumericParam filter(MLNumericParam param)
{
    if (debug)
    {
        return(debugFilter((float)param));
    }
    switch (type)
    {
    case FilterType.PASS_AS_IS:
    default:
        return(param);

    case FilterType.FLOAT_TO_BOOL:
        // Truthiness of the RAW input — scale/add are NOT applied here.
        return(param.Bool ? 1f : 0f);

    case FilterType.SCALE_ADD:
        return(param * scale + add);

    case FilterType.SCALE_ADD_MOD:
        return(MLMath.fmod(param * scale + add, mod));

    case FilterType.SCALE_ADD_CLAMP:
        return(Mathf.Clamp(param * scale + add, clampRange.min, clampRange.max));

    case FilterType.SCALE_ADD_MOD_ROUND:
        return(Mathf.Round(MLMath.fmod(param * scale + add, mod)));

    case FilterType.SCALE_ADD_MOD_FLOOR:
        return(Mathf.Floor(MLMath.fmod(param * scale + add, mod)));

    case FilterType.SCALE_ADD_MOD_CEIL:
        return(Mathf.Ceil(MLMath.fmod(param * scale + add, mod)));

    case FilterType.SCALE_ADD_MOD_OFFSET:
        return(MLMath.fmod(param * scale + add, mod) + offset);

    case FilterType.FLOAT_TO_BOOL_INVERT:
        return(param.Bool ? 0f : 1f);

    case FilterType.TRUE_IF_EQUALS_AS_INT:
        // Integer comparison against the configured constant.
        return((int)param.value_ == intCompare ? 1f : 0f);
    }
}
/// <summary>
/// Evaluates the hypothesis sigmoid(Theta · x) for a single sample vector.
/// </summary>
/// <param name="x">Feature vector (bias-augmented, same length as Theta).</param>
/// <returns>Predicted probability in (0, 1).</returns>
public double H(double[] x)
{
    return MLMath.Sigmoid(MLMath.ItemMultiplyAndSum(Theta, x));
}
/// <summary>
/// Trains a multinomial classifier on matrices loaded from disk, logs the
/// cost curve, evaluates accuracy on a held-out test set, and saves the
/// final Theta.
/// Fixes: (1) test-set predictions were compared against the TRAINING labels
/// y[i] instead of y_test[i], which made the reported accuracy meaningless;
/// (2) accuracy counting stopped entirely once 20 errors had been logged
/// (correct predictions after that point were not counted) — logging is now
/// capped but counting always runs; (3) removed the leftover no-op statement
/// `x_test.Count();`.
/// </summary>
public void TrainTest()
{
    List<double[]> x = FeatureExtension.LoadMatrix("F:\\x.txt");
    List<double[]> y = FeatureExtension.LoadMatrix("F:\\y.txt");
    MultiLogisticClassifier regr = new MultiLogisticClassifier(FeatureGenerator.FeatureCount, 6, 3, 0);
    int m = 1000; //x.Count();
    for (int i = 0; i < m; i++)
    {
        regr.AddSample(x[i], y[i]);
    }

    int times = 2000;
    var results = regr.Train(times);
    // Log the cost (and Theta) every 100 iterations to watch convergence.
    for (int i = 0; i < times; i++)
    {
        if (i % 100 != 0)
        {
            continue;
        }
        Debug.Print(results[i].Cost.ToString());
        for (int t = 0; t < results[i].Theta.GetLength(0); t++)
        {
            string theta_string = string.Empty;
            for (int s = 0; s < results[i].Theta.GetLength(1); s++)
            {
                theta_string += results[i].Theta[t, s].ToString() + ", ";
            }
            Debug.Print("\t" + theta_string);
        }
    }

    List<double[]> x_test = FeatureExtension.LoadMatrix("F:\\x_test.txt");
    List<double[]> y_test = FeatureExtension.LoadMatrix("F:\\y_test.txt");
    int m_test = 100; // evaluate on the first 100 test rows only

    int correct_test = 0;
    int error_compare = 0;
    for (int i = 0; i < m_test; i++)
    {
        double[] p1 = regr.Predict(x_test[i]);
        // BUGFIX: compare against the test labels, not the training labels.
        if (MLMath.AreEqual(p1, y_test[i]))
        {
            correct_test += 1;
        }
        else if (error_compare < 20)
        {
            // Log up to 20 mispredictions for manual inspection.
            double[] p2 = regr.PredictByPercentage(x_test[i]);
            Debug.Print(y_test[i].ToLine());
            Debug.Print(p1.ToLine());
            Debug.Print(p2.ToLine());
            error_compare++;
        }
    }
    double precise = correct_test * 1.0 / m_test;
    Debug.Print(precise.ToString());

    var theta = results.Last().Theta;
    FeatureExtension.SaveMatrix(theta, "F:\\theta.txt");
}
/// <summary>
/// Static hypothesis helper: sigmoid of the dot product of a sample and theta.
/// </summary>
/// <param name="x">Feature vector (bias-augmented).</param>
/// <param name="theta">Parameter vector of the same length.</param>
/// <returns>Predicted probability in (0, 1).</returns>
public static double H(double[] x, double[] theta)
{
    return MLMath.Sigmoid(MLMath.ItemMultiplyAndSum(x, theta));
}
/// <summary>
/// Prints sigmoid(5) for manual inspection; no assertion is made.
/// </summary>
public void SigmoidTest()
{
    double sigmoidOfFive = MLMath.Sigmoid(5);
    Debug.Print(sigmoidOfFive.ToString());
}