/// <summary>
/// Fits the cumulative Gaussian model to the "Produto 2500" experimental
/// distribution data with Levenberg–Marquardt and traces the fitted parameters.
/// </summary>
public void Produto2500Teste()
{
    LMAFunction f = new FuncaoGaussianaAcumulada();

    // Initial guesses for the two model parameters — presumably (spread, center)
    // of the cumulative Gaussian; TODO confirm ordering against FuncaoGaussianaAcumulada.
    double[] parametrosEstimados = new[] { 0.111989183, 0.735 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, 0.26, 0.28, 0.3, 0.32, 0.34, 0.36, 0.38, 0.4, 0.42, 0.44, 0.46, 0.48, 0.5, 0.52, 0.54, 0.56, 0.58, 0.6, 0.62, 0.64, 0.66, 0.68, 0.7, 0.72, 0.74, 0.76, 0.78, 0.8, 0.82, 0.84, 0.86, 0.88, 0.9, 0.92, 0.94, 0.96 };
    double[] yExperimental = new[] { 0, 0, 0.03472, 0.06644, 0.09601, 0.12272, 0.14793, 0.17208, 0.19535, 0.21753, 0.23669, 0.25444, 0.27351, 0.29654, 0.32522, 0.35796, 0.39288, 0.42818, 0.46214, 0.4953, 0.52848, 0.56211, 0.59659, 0.63314, 0.67136, 0.70917, 0.74448, 0.77573, 0.80428, 0.83035, 0.85386, 0.87476, 0.89255, 0.90755, 0.92063, 0.93257, 0.94395, 0.95415, 0.96302, 0.97057, 0.97682, 0.98124, 0.98433, 0.98668, 0.98884, 0.9908, 0.9916, 0.99404, 0.99706 };
    #endregion

    // Row 0 = x values, row 1 = y values, as required by the LMA constructor.
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // BUG FIX: the original passed "1d - 30" — the subtraction 1.0 - 30 = -29.0 —
    // where the convergence tolerance belongs; "1e-30" was clearly intended.
    // The alpha matrix is sized from the parameter count instead of a hard-coded 2,
    // matching FeedTeste in this file.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);
    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}
/// <summary>
/// Fits the cumulative Gaussian model to the "Produto 1724" experimental
/// distribution data with Levenberg–Marquardt and traces the fitted parameters.
/// </summary>
public void Produto1724Teste()
{
    LMAFunction f = new FuncaoGaussianaAcumulada();

    // Initial guesses for the two model parameters — presumably (spread, center)
    // of the cumulative Gaussian; TODO confirm ordering against FuncaoGaussianaAcumulada.
    double[] parametrosEstimados = new[] { 0.111989183, 0.735 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, 0.26, 0.28, 0.3, 0.32, 0.34, 0.36, 0.38, 0.4, 0.42, 0.44, 0.46, 0.48, 0.5, 0.52, 0.54, 0.56, 0.58, 0.6, 0.62, 0.64, 0.66, 0.68, 0.7, 0.72, 0.74, 0.76, 0.78, 0.8, 0.82, 0.84, 0.86, 0.88, 0.9, 0.92, 0.94, 0.96 };
    double[] yExperimental = new[] { 0, 0, 0.01558, 0.02839, 0.04047, 0.05091, 0.05979, 0.06895, 0.07986, 0.09401, 0.11091, 0.12986, 0.15061, 0.17297, 0.19653, 0.22114, 0.2478, 0.27755, 0.31163, 0.35184, 0.396, 0.44082, 0.48298, 0.52204, 0.55988, 0.59661, 0.63235, 0.66734, 0.70202, 0.73575, 0.7677, 0.79702, 0.82366, 0.84807, 0.87005, 0.88941, 0.90594, 0.91933, 0.92999, 0.93881, 0.94672, 0.95361, 0.95951, 0.96473, 0.96941, 0.97359, 0.97704, 0.97868, 0.97878 };
    #endregion

    // Row 0 = x values, row 1 = y values, as required by the LMA constructor.
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // BUG FIX: the original passed "1d - 30" — the subtraction 1.0 - 30 = -29.0 —
    // where the convergence tolerance belongs; "1e-30" was clearly intended.
    // The alpha matrix is sized from the parameter count instead of a hard-coded 2,
    // matching FeedTeste in this file.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);
    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}
/// <summary>
/// Fits the cumulative Gaussian model to the feed-stream experimental
/// distribution data with Levenberg–Marquardt and traces the fitted parameters.
/// </summary>
public void FeedTeste()
{
    LMAFunction f = new FuncaoGaussianaAcumulada();

    // Initial guesses for the two model parameters — presumably (spread, center)
    // of the cumulative Gaussian; TODO confirm ordering against FuncaoGaussianaAcumulada.
    double[] parametrosEstimados = new[] { 0.111989183, 0.735 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, 0.26, 0.28, 0.3, 0.32, 0.34, 0.36, 0.38, 0.4, 0.42, 0.44, 0.46, 0.48, 0.5, 0.52, 0.54, 0.56, 0.58, 0.6, 0.62, 0.64, 0.66, 0.68, 0.7, 0.72, 0.74, 0.76, 0.78, 0.8, 0.82, 0.84, 0.86, 0.88, 0.9, 0.92, 0.94, 0.96 };
    double[] yExperimental = new[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02532, 0.03191, 0.03557, 0.03855, 0.04199, 0.05473, 0.07439, 0.09963, 0.1324, 0.17218, 0.222, 0.2795, 0.34357, 0.41488, 0.48759, 0.55907, 0.62985, 0.69556, 0.75486, 0.8067, 0.85443, 0.89529, 0.9253, 0.94629, 0.96257, 0.97404, 0.98309 };
    #endregion

    // Row 0 = x values, row 1 = y values, as required by the LMA constructor.
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // BUG FIX: the original passed "1d - 30" — the subtraction 1.0 - 30 = -29.0 —
    // where the convergence tolerance belongs; "1e-30" was clearly intended.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);
    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}
/// <summary>
/// Fits the cumulative Gaussian model to the "Produto 0952" experimental
/// distribution data with Levenberg–Marquardt and traces the fitted parameters.
/// </summary>
public void Produto0952Teste()
{
    LMAFunction f = new FuncaoGaussianaAcumulada();

    // Initial guesses for the two model parameters — presumably (spread, center)
    // of the cumulative Gaussian; TODO confirm ordering against FuncaoGaussianaAcumulada.
    double[] parametrosEstimados = new[] { 0.111989183, 0.735 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, 0.26, 0.28, 0.3, 0.32, 0.34, 0.36, 0.38, 0.4, 0.42, 0.44, 0.46, 0.48, 0.5, 0.52, 0.54, 0.56, 0.58, 0.6, 0.62, 0.64, 0.66, 0.68, 0.7, 0.72, 0.74, 0.76, 0.78, 0.8, 0.82, 0.84, 0.86, 0.88, 0.9, 0.92, 0.94, 0.96 };
    double[] yExperimental = new[] { 0, 0.00608, 0.01391, 0.02146, 0.02825, 0.03357, 0.03764, 0.04171, 0.04676, 0.05379, 0.06234, 0.07212, 0.08319, 0.09557, 0.10882, 0.12271, 0.13817, 0.15615, 0.17769, 0.20325, 0.23188, 0.2624, 0.29354, 0.32523, 0.35833, 0.39221, 0.42622, 0.45978, 0.49319, 0.52665, 0.56012, 0.59358, 0.62763, 0.66244, 0.69672, 0.72918, 0.75868, 0.78573, 0.81087, 0.83445, 0.85676, 0.87774, 0.89717, 0.91506, 0.93141, 0.94569, 0.95773, 0.96857, 0.97905 };
    #endregion

    // Row 0 = x values, row 1 = y values, as required by the LMA constructor.
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // BUG FIX: the original passed "1d - 30" — the subtraction 1.0 - 30 = -29.0 —
    // where the convergence tolerance belongs; "1e-30" was clearly intended.
    // The alpha matrix is sized from the parameter count instead of a hard-coded 2,
    // matching FeedTeste in this file.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);
    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}
/// <summary>
/// Fits the cumulative Gaussian model to the "Produto 0383" experimental
/// distribution data with Levenberg–Marquardt and traces the fitted parameters.
/// </summary>
public void Produto0383Teste()
{
    LMAFunction f = new FuncaoGaussianaAcumulada();

    // Initial guesses for the two model parameters — presumably (spread, center)
    // of the cumulative Gaussian; TODO confirm ordering against FuncaoGaussianaAcumulada.
    double[] parametrosEstimados = new[] { 0.111989183, 0.735 };

    #region Dados Experimentais
    double[] xExperimental = new[] { 0, 0.02, 0.04, 0.06, 0.08, 0.1, 0.12, 0.14, 0.16, 0.18, 0.2, 0.22, 0.24, 0.26, 0.28, 0.3, 0.32, 0.34, 0.36, 0.38, 0.4, 0.42, 0.44, 0.46, 0.48, 0.5, 0.52, 0.54, 0.56, 0.58, 0.6, 0.62, 0.64, 0.66, 0.68, 0.7, 0.72, 0.74, 0.76, 0.78, 0.8, 0.82, 0.84, 0.86, 0.88, 0.9, 0.92, 0.94, 0.96 };
    double[] yExperimental = new[] { 0, 0, 0.00874, 0.0121, 0.01537, 0.01847, 0.02116, 0.02346, 0.02575, 0.02838, 0.03166, 0.0356, 0.03978, 0.04442, 0.04979, 0.05615, 0.06377, 0.07248, 0.08224, 0.09298, 0.10462, 0.11696, 0.12955, 0.14288, 0.15755, 0.17419, 0.1933, 0.21393, 0.23625, 0.26089, 0.28852, 0.31983, 0.35482, 0.39311, 0.43451, 0.47878, 0.52576, 0.57747, 0.63319, 0.69075, 0.74798, 0.802, 0.85845, 0.91109, 0.94528, 0.96223, 0.97008, 0.97396, 0.97589 };
    #endregion

    // Row 0 = x values, row 1 = y values, as required by the LMA constructor.
    double[][] dataPoints = new double[2][];
    dataPoints[0] = xExperimental;
    dataPoints[1] = yExperimental;

    // BUG FIX: the original passed "1d - 30" — the subtraction 1.0 - 30 = -29.0 —
    // where the convergence tolerance belongs; "1e-30" was clearly intended.
    // The alpha matrix is sized from the parameter count instead of a hard-coded 2,
    // matching FeedTeste in this file.
    Net.Kniaz.LMA.LMA algorithm = new Net.Kniaz.LMA.LMA(
        f,
        parametrosEstimados,
        dataPoints,
        null,
        new GeneralMatrix(parametrosEstimados.Length, parametrosEstimados.Length),
        1e-30,
        100);
    algorithm.Fit();

    for (int i = 0; i < parametrosEstimados.Length; i++)
    {
        Trace.WriteLine("Parameter" + i.ToString() + " " + algorithm.Parameters[i].ToString());
    }
}