/// <summary>
/// Performs one gradient-descent step for a power-law fit y = a * x^m.
/// The model is linearised in log space: y' = m * x' + b, where
/// y' = log10(y), x' = log10(x) and b = log10(a) (so a = 10^b).
/// </summary>
/// <param name="b_current">Current intercept estimate (log space).</param>
/// <param name="m_current">Current slope (exponent) estimate.</param>
/// <param name="points">Data points; X and Y must be positive, since log10 is taken of both.</param>
/// <param name="learningRate">Step size applied to each gradient component.</param>
/// <returns>The updated (b, m) pair after a single descent step.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="points"/> is null or empty.</exception>
private TuningParamsClass step_gradient(double b_current, double m_current, List<Point> points, double learningRate)
{
    // Guard: the original divided by points.Count unchecked, which would
    // produce Infinity/NaN gradients for an empty list.
    if (points == null || points.Count == 0)
    {
        throw new ArgumentException("At least one point is required.", nameof(points));
    }

    double b_gradient = 0;
    double m_gradient = 0;

    // Fix: direct numeric conversion. The original round-tripped every value
    // through double.Parse(x.ToString()), which is slow and culture-sensitive
    // (breaks on comma-decimal locales, CA1305).
    double n = points.Count;

    foreach (Point p in points)
    {
        // Transform into log space: x' = log10(x), y' = log10(y).
        double pX = Math.Log10(Convert.ToDouble(p.X));
        double pY = Math.Log10(Convert.ToDouble(p.Y));

        // Partial derivatives of the mean squared error in log space.
        // Hoisted the shared residual term instead of computing it twice.
        double residual = pY - ((m_current * pX) + b_current);
        b_gradient += -(2 / n) * residual;
        m_gradient += -(2 / n) * pX * residual;
    }

    TuningParamsClass tp = new TuningParamsClass();
    tp.b = b_current - (learningRate * b_gradient);
    tp.m = m_current - (learningRate * m_gradient);
    return tp;
}
/// <summary>
/// Runs gradient descent for <paramref name="num_iterations"/> steps, starting
/// from (starting_b, starting_m), logging progress and the current error to the
/// page output and the debug console after each step.
/// </summary>
/// <param name="points">Data points fitted by <c>step_gradient</c>.</param>
/// <param name="starting_b">Initial intercept estimate (log space).</param>
/// <param name="starting_m">Initial slope estimate.</param>
/// <param name="learning_rate">Step size passed through to <c>step_gradient</c>.</param>
/// <param name="num_iterations">Exact number of descent steps to perform.</param>
/// <returns>The tuning parameters after the final step (the starting values if num_iterations is 0).</returns>
private TuningParamsClass gradient_descent_runner(List<Point> points, double starting_b, double starting_m, double learning_rate, int num_iterations)
{
    TuningParamsClass tpFinal = new TuningParamsClass();
    tpFinal.b = starting_b;
    tpFinal.m = starting_m;
    double b = starting_b;
    double m = starting_m;

    // Accumulate HTML in a StringBuilder: the original appended to
    // litOutput.Text inside the loop, re-copying the whole string on every
    // iteration (O(n^2) for n iterations).
    System.Text.StringBuilder output = new System.Text.StringBuilder(litOutput.Text);

    // Fix: run exactly num_iterations steps. The original used "<=" and
    // therefore performed num_iterations + 1 gradient updates.
    for (int i = 0; i < num_iterations; i++)
    {
        // Same progress text goes to both the page and the debug console.
        string progress = "Step:" + i.ToString() + ", b: [" + b.ToString() + "] m: [" + m.ToString() + "] " + DateTime.Now.TimeOfDay.ToString();
        output.Append("<BR/><BR/>").Append(progress);
        Debug.WriteLine(progress);

        tpFinal = step_gradient(b, m, points, learning_rate);
        b = tpFinal.b;
        m = tpFinal.m;

        string error = string.Format("error = {0}", compute_error_for_line_given_points(b, m, points));
        output.Append("<br/><br/> > ").Append(error);
        Debug.WriteLine(" > " + error);
    }

    litOutput.Text = output.ToString();
    return tpFinal;
}