Example #1
        /// <summary>
        /// Fits the data to the specified function. The function is meant to have <b>p</b>+<b>v</b> parameters, the first <b>p</b> of which are to be fitted,
        /// whereas the remaining <b>v</b> are assumed to be independent variables, whose values are picked from the lists for the independent variables.
        /// </summary>
        /// <param name="f">the function to be fitted. First derivatives w.r.t. the fit parameters are needed.</param>
        /// <param name="fitparameters">the number of parameters to be fitted. They must be the first parameters to be passed to the function.</param>
        /// <param name="indep">the list of values for the independent variables.</param>
        /// <param name="dep">the list of values for the dependent variable.</param>
        /// <param name="deperr">the list of errors for the dependent variable.</param>
        /// <param name="maxiterations">maximum number of iterations to find the minimum.</param>
        /// <returns>the parameters of the fit.</returns>
        public double[] Fit(NumericalTools.Minimization.ITargetFunction f, int fitparameters, double[][] indep, double[] dep, double[] deperr, int maxiterations)
        {
            m_DegreesOfFreedom = dep.Length - fitparameters;
            if (m_DegreesOfFreedom < 0)
            {
                throw new NumericalTools.Minimization.MinimizationException("Degrees of freedom = " + m_DegreesOfFreedom + ". Aborting.");
            }
            NumericalTools.Minimization.NewtonMinimizer MA = new NumericalTools.Minimization.NewtonMinimizer();
            MA.Logger = m_TW;
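            // Build the chi-square target from the model, data and errors, then minimize it.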
            Chi2F chi2 = new Chi2F(f, fitparameters, indep, dep, deperr);

            MA.FindMinimum(chi2, maxiterations);
            m_EstimatedVariance = MA.Value / m_DegreesOfFreedom;
            m_BestFit           = MA.Point;
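            // Build the Hessian of chi-square at the best-fit point from its first derivatives.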
            Minimization.ITargetFunction[] g = new NumericalTools.Minimization.ITargetFunction[fitparameters];
            double[,] hessian = new double[fitparameters, fitparameters];
            int i, j;

            for (i = 0; i < fitparameters; i++)
            {
                g[i] = chi2.Derive(i);
                for (j = 0; j < fitparameters; j++)
                {
                    hessian[i, j] = g[i].Derive(j).Evaluate(m_BestFit);
                }
            }
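            // Invert the Hessian via Cholesky decomposition; the inverse is normalized below to
            // the correlation matrix and scaled to give the parameter standard errors.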
            m_CorrelationMatrix = new double[fitparameters, fitparameters];
            double[][] c = new Cholesky(hessian, 0.0).Inverse(0.0);
            for (i = 0; i < fitparameters; i++)
            {
                for (j = 0; j < i; j++)
                {
                    m_CorrelationMatrix[j, i] = m_CorrelationMatrix[i, j] = c[i][j] / (Math.Sqrt(c[i][i]) * Math.Sqrt(c[j][j]));
                }
                m_CorrelationMatrix[i, i] = 1.0;
            }
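            // Standard errors from the diagonal of the inverse Hessian, scaled by the estimated variance.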
            m_StandardErrors = new double[fitparameters];
            for (i = 0; i < fitparameters; i++)
            {
                m_StandardErrors[i] = Math.Sqrt(m_EstimatedVariance * c[i][i]);
            }
            return m_BestFit;
        }
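A hypothetical call site for this overload, for orientation only; LeastSquares (the class hosting Fit) and LineModel (an ITargetFunction implementing y = a + b*x, with fitted parameters a and b followed by the independent variable x) are assumed names, not part of this listing:

        // Hypothetical usage sketch: fit y = a + b*x to three points with errors on y only.
        double[][] x  = new double[][] { new double[] { 0.0 }, new double[] { 1.0 }, new double[] { 2.0 } };
        double[]   y  = new double[] { 1.1, 2.9, 5.2 };
        double[]   sy = new double[] { 0.1, 0.1, 0.1 };
        LeastSquares fitter = new LeastSquares();                             // assumed host class name
        NumericalTools.Minimization.ITargetFunction line = new LineModel();   // assumed model: parameters a, b, then x
        double[] bestfit = fitter.Fit(line, 2, x, y, sy, 100);
        // bestfit[0] and bestfit[1] hold the fitted intercept and slope; the standard
        // errors and correlation matrix are then available from the fitter's members.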
Example #2
        /// <summary>
        /// Fits the data to the specified function. The function is meant to have <b>p</b>+<b>v</b> parameters, the first <b>p</b> of which are to be fitted,
        /// whereas the remaining <b>v</b> are assumed to be independent variables, whose values are picked from the lists for the independent variables.
        /// </summary>
        /// <param name="f">the function to be fitted. First derivatives w.r.t. the fit parameters are needed.</param>
        /// <param name="fitparameters">the number of parameters to be fitted. They must be the first parameters to be passed to the function.</param>
        /// <param name="indep">the list of values for the independent variables.</param>
        /// <param name="dep">the list of values for the dependent variable.</param>
        /// <param name="indeperr">the list of errors for the independent variables.</param>
        /// <param name="deperr">the list of errors for the dependent variable.</param>
        /// <param name="maxiterations">maximum number of iterations to find the minimum.</param>
        /// <returns>the parameters of the fit.</returns>
        /// <remarks>The method of effective variance is used to take errors on the independent variables into account. </remarks>
        public double[] Fit(NumericalTools.Minimization.ITargetFunction f, int fitparameters, double[][] indep, double[] dep, double [][] indeperr, double[] deperr, int maxiterations)
        {
            m_DegreesOfFreedom = dep.Length - fitparameters;
            if (m_DegreesOfFreedom < 0)
            {
                throw new NumericalTools.Minimization.MinimizationException("Degrees of freedom = " + m_DegreesOfFreedom + ". Aborting.");
            }
            NumericalTools.Minimization.NewtonMinimizer MA = new NumericalTools.Minimization.NewtonMinimizer();
            MA.Logger = m_TW;
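            // Derivatives of f with respect to each independent variable (parameter slots
            // fitparameters..CountParams-1), used to propagate the x errors.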
            NumericalTools.Minimization.ITargetFunction[] f_d = new NumericalTools.Minimization.ITargetFunction[f.CountParams - fitparameters];
            int i, j;

            for (i = 0; i < f_d.Length; i++)
            {
                f_d[i] = f.Derive(i + fitparameters);
            }
            double [] c_deperr = new double[deperr.Length];
            double [] xp = new double[f.CountParams];
            double [] xfp = new double[fitparameters];
            double    dfx;

            // Initial guess for the fitted parameters, taken from the target function's start point.
            for (i = 0; i < fitparameters; i++)
            {
                xfp[i] = f.Start[i];
            }
            int maxouteriter = maxiterations;

            if (maxouteriter <= 0)
            {
                maxouteriter = -1;
            }
            double f0, f1, dxchange;

            do
            {
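                // One pass of the effective-variance method: propagate the current x errors
                // through the model's derivatives, refit with the inflated y errors, and
                // check whether chi-square still improves.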
                if (m_TW != null)
                {
                    m_TW.WriteLine("Starting with derivative guess - remaining iterations: " + maxouteriter);
                }
                // Load the current guess of the fitted parameters into the full argument vector.
                for (i = 0; i < fitparameters; i++)
                {
                    xp[i] = xfp[i];
                }
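                // Effective variance: inflate each point's dependent-variable error with the
                // x errors propagated through the derivatives w.r.t. the independent variables.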
                for (i = 0; i < c_deperr.Length; i++)
                {
                    for (j = 0; j < f_d.Length; j++)
                    {
                        xp[j + fitparameters] = indep[i][j];
                    }
                    c_deperr[i] = deperr[i] * deperr[i];
                    for (j = 0; j < f_d.Length; j++)
                    {
                        dfx          = f_d[j].Evaluate(xp) * indeperr[i][j];
                        c_deperr[i] += dfx * dfx;
                    }
                    c_deperr[i] = Math.Sqrt(c_deperr[i]);
                }
                Chi2F chi2 = new Chi2F(f, fitparameters, indep, dep, c_deperr);
                chi2.SetStart(xfp);
                f0 = chi2.Evaluate(xfp);
                MA.FindMinimum(chi2, maxiterations);
                m_EstimatedVariance = MA.Value / m_DegreesOfFreedom;
                m_BestFit           = MA.Point;
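                // Error analysis as in the overload without x errors: Hessian of chi-square,
                // Cholesky inverse, correlation matrix and standard errors.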
                Minimization.ITargetFunction[] g = new NumericalTools.Minimization.ITargetFunction[fitparameters];
                double[,] hessian = new double[fitparameters, fitparameters];
                for (i = 0; i < fitparameters; i++)
                {
                    g[i] = chi2.Derive(i);
                    for (j = 0; j < fitparameters; j++)
                    {
                        hessian[i, j] = g[i].Derive(j).Evaluate(m_BestFit);
                    }
                }
                m_CorrelationMatrix = new double[fitparameters, fitparameters];
                double[][] c = new Cholesky(hessian, 0.0).Inverse(0.0);
                for (i = 0; i < fitparameters; i++)
                {
                    for (j = 0; j < i; j++)
                    {
                        m_CorrelationMatrix[j, i] = m_CorrelationMatrix[i, j] = c[i][j] / (Math.Sqrt(c[i][i]) * Math.Sqrt(c[j][j]));
                    }
                    m_CorrelationMatrix[i, i] = 1.0;
                }
                m_StandardErrors = new double[fitparameters];
                for (i = 0; i < fitparameters; i++)
                {
                    m_StandardErrors[i] = Math.Sqrt(m_EstimatedVariance * c[i][i]);
                }
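                // Convergence measure: squared change of the fitted parameters over this outer iteration.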
                dxchange = 0.0;
                for (i = 0; i < fitparameters; i++)
                {
                    dxchange += (xfp[i] - m_BestFit[i]) * (xfp[i] - m_BestFit[i]);
                }
                f1 = chi2.Evaluate(m_BestFit);
                for (i = 0; i < xfp.Length; i++)
                {
                    xfp[i] = m_BestFit[i];
                }
                if (m_TW != null)
                {
                    m_TW.WriteLine("End with derivative guess - remaining iterations: " + maxouteriter);
                }
                if (--maxouteriter < 0)
                {
                    maxouteriter = -1;
                }
            } while (maxouteriter != 0 && f.StopMinimization(f1, f0, dxchange) == false);
            return m_BestFit;
        }
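As an aside, the effective-variance weighting computed inside the do/while loop above can be illustrated in isolation. The following self-contained sketch (independent of the NumericalTools types, with made-up numbers) propagates x errors into the y error of each point for a straight-line model y = a + b*x, which is the quantity the loop stores in c_deperr:

using System;

class EffectiveVarianceDemo
{
    static void Main()
    {
        double b = 2.0;                       // current slope guess; for y = a + b*x, df/dx = b
        double[] x  = { 0.0, 1.0, 2.0 };      // independent variable
        double[] sx = { 0.05, 0.05, 0.05 };   // errors on x
        double[] sy = { 0.10, 0.10, 0.10 };   // errors on y

        for (int i = 0; i < x.Length; i++)
        {
            // Effective variance: sigma_eff^2 = sigma_y^2 + (df/dx)^2 * sigma_x^2.
            double seff = Math.Sqrt(sy[i] * sy[i] + b * b * sx[i] * sx[i]);
            Console.WriteLine("point " + i + ": effective y error = " + seff);
        }
    }
}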