/// <summary>
/// Fits a gamma function (<see cref="FuncaoGama"/>) to a small experimental feed
/// data set using the Levenberg-Marquardt algorithm and traces the fitted parameters.
/// </summary>
public void FeedTeste()
{
    LMAFunction f = new FuncaoGama();

    // Initial guesses for the three gamma-function parameters.
    double[] parametrosEstimados = new[] { 1.0, 10, 0.3 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.43, 0.45, 0.47, 0.49, 0.51, 0.53, 0.55, 0.57, 0.59, 0.61, 0.63, 0.65, 0.67, 0.69, 0.71, 0.73, 0.75, 0.77, 0.79, 0.81, 0.83, 0.85, 0.87, 0.89, 0.91, 0.93, 0.95, 0.97, 1 };
    double[] yExperimental = new[] { 0, 0.000477354, 0.000954709, 0.001432063, 0.001909417, 0.002386771, 0.002864126, 0.00334148, 0.003818834, 0.004105247, 0.00777, 0.00485, 0.0031, 0.00267, 0.00787, 0.01618, 0.02244, 0.02912, 0.03585, 0.04481, 0.05397, 0.0607, 0.06801, 0.07284, 0.0717, 0.07164, 0.0687, 0.0627, 0.05525, 0.04971, 0.04483, 0.03567, 0.02498, 0.01816, 0.01397, 0.00995, 0.00872, 0.01255 };
    #endregion

    // dataPoints[0] = abscissas, dataPoints[1] = ordinates (layout expected by LMA).
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // FIX: the termination tolerance was written as "1d - 30", which is the
    // subtraction 1.0 - 30 = -29.0, not the intended tiny epsilon 1e-30.
    // Also size the alpha matrix from the parameter count instead of the
    // hard-coded 3, matching FeedAdam2012Teste.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);

    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}
/// <summary>
/// Fits a four-parameter gamma function (<see cref="FuncaoGama"/>) to the Adam 2012
/// experimental data set using the Levenberg-Marquardt algorithm and traces the
/// fitted parameters.
/// </summary>
public void FeedAdam2012Teste()
{
    LMAFunction f = new FuncaoGama();

    // Initial guesses for the four gamma-function parameters.
    double[] parametrosEstimados = new[] { 110, 6.0, 2.0, 10.0 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 6, 6.5, 7, 7.5, 8, 8.5, 9, 9.5, 10, 10.5, 11, 11.5, 12, 12.5, 13, 13.5, 14, 14.5, 15, 15.5, 16, 16.5, 17, 17.5, 18, 18.5, 19, 19.5, 20, 20.5, 21, 21.5, 22, 22.5, 23, 23.5, 24, 24.5, 25, 25.5, 26, 26.5, 27, 27.5, 28, 28.5, 29, 29.5, 30, 30.5, 31, 31.5, 32, 32.5, 33, 33.5, 34, 34.5, 35, 35.5, 36, 36.5, 37, 37.5, 38, 38.5, 39, 39.5, 40, 40.5, 41, 41.5, 42, 42.5, 43, 43.5, 44, 44.5, 45, 45.5, 46, 46.5, 47, 47.5, 48, 48.5, 49, 49.5, 50, 50.5, 51, 51.5, 52, 52.5, 53, 53.5, 54, 54.5, 55, 55.5, 56, 56.5, 57, 57.5, 58, 58.5, 59, 59.5, 60, 60.5, 61, 61.5, 62, 62.5, 63, 63.5, 64, 64.5, 65, 65.5, 66, 66.5, 67, 67.5, 68, 68.5, 69, 69.5 };
    double[] yExperimental = new[] { 0.05038, 0.26068, 0.63623, 0.98765, 1.34988, 1.70814, 2.07049, 2.46031, 2.8452, 3.13967, 3.36847, 3.54668, 3.69239, 3.81627, 3.90255, 3.94637, 3.96881, 3.99722, 4.01619, 4.00553, 3.98141, 3.95254, 3.91177, 3.89293, 3.90277, 3.85827, 3.76256, 3.70522, 3.65271, 3.58838, 3.51878, 3.45647, 3.40999, 3.37829, 3.35271, 3.34001, 3.34441, 3.30962, 3.26256, 3.22453, 3.16822, 2.77531, 2.21383, 2.44192, 2.65645, 2.5645, 2.39322, 2.26306, 2.13443, 2.00384, 1.87456, 1.76221, 1.66783, 1.59939, 1.54273, 1.47008, 1.41341, 1.4354, 1.43696, 1.39685, 1.34782, 1.30362, 1.26077, 1.22248, 1.18638, 1.14808, 1.11094, 1.11799, 1.11113, 1.03607, 0.96056, 0.935, 0.95218, 0.95174, 0.92821, 0.90443, 0.87775, 0.80971, 0.72396, 0.72258, 0.74405, 0.669, 0.57887, 0.54087, 0.51708, 0.49078, 0.46682, 0.44668, 0.43057, 0.41281, 0.39397, 0.37357, 0.35218, 0.33123, 0.31164, 0.29456, 0.27949, 0.26638, 0.25576, 0.24709, 0.23974, 0.23362, 0.22827, 0.22338, 0.21878, 0.21436, 0.21001, 0.20563, 0.20107, 0.19675, 0.19276, 0.18892, 0.18502, 0.18077, 0.17552, 0.16389, 0.14573, 0.19372, 0.2433, 0.22274, 0.18939, 0.15281, 0.11896, 0.09803, 0.08244, 0.06956, 0.05915, 0.05214 };
    #endregion

    // dataPoints[0] = abscissas, dataPoints[1] = ordinates (layout expected by LMA).
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // FIX: the termination tolerance was written as "1d - 30", which is the
    // subtraction 1.0 - 30 = -29.0, not the intended tiny epsilon 1e-30.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);

    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}