Example #1
 public void nestedOptimizationTest()
 {
     //("Testing nested optimizations...");
     OptimizationBasedCostFunction optimizationBasedCostFunction = new OptimizationBasedCostFunction();
     NoConstraint constraint = new NoConstraint();
     Vector initialValues = new Vector(1, 0.0);
     Problem problem = new Problem(optimizationBasedCostFunction, constraint, initialValues);
     LevenbergMarquardt optimizationMethod = new LevenbergMarquardt();
     //Simplex optimizationMethod(0.1);
     //ConjugateGradient optimizationMethod;
     //SteepestDescent optimizationMethod;
     EndCriteria endCriteria = new EndCriteria(1000, 100, 1e-5, 1e-5, 1e-5);
     optimizationMethod.minimize(problem, endCriteria);
 }
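This example shows the standard QLNet optimization setup: a cost function and a constraint are wrapped in a Problem, an OptimizationMethod is chosen, and minimize is called with an EndCriteria carrying the iteration limits and tolerances. Below is a minimal sketch of a cost function that could be plugged into the same pattern; the class name and the quadratic residuals are made up for illustration, and it assumes QLNet's CostFunction exposes overridable value and values methods (the latter is what Example #2 overrides).

 // Sketch of a custom cost function for the pattern above.
 // SquaredDistanceCostFunction and its target vector are illustrative, not part of QLNet.
 public class SquaredDistanceCostFunction : CostFunction
 {
     private readonly Vector target_;
     public SquaredDistanceCostFunction(Vector target) { target_ = target; }

     // residuals x[i] - target[i], used by least-squares methods such as LevenbergMarquardt
     public override Vector values(Vector x)
     {
         Vector diff = new Vector(x.size());
         for (int i = 0; i < x.size(); i++)
             diff[i] = x[i] - target_[i];
         return diff;
     }

     // scalar objective: sum of squared residuals
     public override double value(Vector x)
     {
         Vector diff = values(x);
         double sum = 0.0;
         for (int i = 0; i < diff.size(); i++)
             sum += diff[i] * diff[i];
         return sum;
     }
 }

It would then be wired up exactly as above: new Problem(new SquaredDistanceCostFunction(target), new NoConstraint(), initialValues), minimized with LevenbergMarquardt.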
Example #2
 public override Vector values(Vector x)
 {
     // dummy nested optimization
     Vector coefficients = new Vector(3, 1.0);
     OneDimensionalPolynomialDegreeN oneDimensionalPolynomialDegreeN = new OneDimensionalPolynomialDegreeN(coefficients);
     NoConstraint constraint = new NoConstraint();
     Vector initialValues = new Vector(1, 100.0);
     Problem problem = new Problem(oneDimensionalPolynomialDegreeN, constraint, initialValues);
     LevenbergMarquardt optimizationMethod = new LevenbergMarquardt();
     //Simplex optimizationMethod(0.1);
     //ConjugateGradient optimizationMethod;
     //SteepestDescent optimizationMethod;
     EndCriteria endCriteria = new EndCriteria(1000, 100, 1e-5, 1e-5, 1e-5);
     optimizationMethod.minimize(problem, endCriteria);
     // return dummy result
     Vector dummy = new Vector(1,0);
     return dummy;
 }
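Note that the nested optimization result is thrown away and a dummy vector is returned. If the inner optimum were actually needed, it could be read back from the Problem once minimize returns; a short sketch, with the surrounding residual logic left out:

     // sketch: recover the inner optimum instead of returning a dummy vector
     EndCriteria.Type ec = optimizationMethod.minimize(problem, endCriteria);
     Vector innerOptimum = problem.currentValue();   // the argmin found by the inner solver
     return innerOptimum;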
Example #3
        // Optimization function for hypersphere and lower-diagonal algorithm
        private static Matrix hypersphereOptimize(Matrix targetMatrix, Matrix currentRoot, bool lowerDiagonal)
        {
            int    i, j, k, size = targetMatrix.rows();
            Matrix result   = new Matrix(currentRoot);
            Vector variance = new Vector(size);

            for (i = 0; i < size; i++)
            {
                variance[i] = Math.Sqrt(targetMatrix[i, i]);
            }
            if (lowerDiagonal)
            {
                Matrix approxMatrix = result * Matrix.transpose(result);
                result = MatrixUtilities.CholeskyDecomposition(approxMatrix, true);
                for (i = 0; i < size; i++)
                {
                    for (j = 0; j < size; j++)
                    {
                        result[i, j] /= Math.Sqrt(approxMatrix[i, i]);
                    }
                }
            }
            else
            {
                for (i = 0; i < size; i++)
                {
                    for (j = 0; j < size; j++)
                    {
                        result[i, j] /= variance[i];
                    }
                }
            }

            ConjugateGradient       optimize     = new ConjugateGradient();
            EndCriteria             endCriteria  = new EndCriteria(100, 10, 1e-8, 1e-8, 1e-8);
            HypersphereCostFunction costFunction = new HypersphereCostFunction(targetMatrix, variance, lowerDiagonal);
            NoConstraint            constraint   = new NoConstraint();

            // hypersphere vector optimization

            if (lowerDiagonal)
            {
                Vector       theta = new Vector(size * (size - 1) / 2);
                const double eps   = 1e-16;
                for (i = 1; i < size; i++)
                {
                    for (j = 0; j < i; j++)
                    {
                        theta[i * (i - 1) / 2 + j] = result[i, j];
                        if (theta[i * (i - 1) / 2 + j] > 1 - eps)
                        {
                            theta[i * (i - 1) / 2 + j] = 1 - eps;
                        }
                        if (theta[i * (i - 1) / 2 + j] < -1 + eps)
                        {
                            theta[i * (i - 1) / 2 + j] = -1 + eps;
                        }
                        for (k = 0; k < j; k++)
                        {
                            theta[i * (i - 1) / 2 + j] /= Math.Sin(theta[i * (i - 1) / 2 + k]);
                            if (theta[i * (i - 1) / 2 + j] > 1 - eps)
                            {
                                theta[i * (i - 1) / 2 + j] = 1 - eps;
                            }
                            if (theta[i * (i - 1) / 2 + j] < -1 + eps)
                            {
                                theta[i * (i - 1) / 2 + j] = -1 + eps;
                            }
                        }
                        theta[i * (i - 1) / 2 + j] = Math.Acos(theta[i * (i - 1) / 2 + j]);
                        if (j == i - 1)
                        {
                            if (result[i, i] < 0)
                            {
                                theta[i * (i - 1) / 2 + j] = -theta[i * (i - 1) / 2 + j];
                            }
                        }
                    }
                }
                Problem p = new Problem(costFunction, constraint, theta);
                optimize.minimize(p, endCriteria);
                theta = p.currentValue();
                result.fill(1);
                for (i = 0; i < size; i++)
                {
                    for (k = 0; k < size; k++)
                    {
                        if (k > i)
                        {
                            result[i, k] = 0;
                        }
                        else
                        {
                            for (j = 0; j <= k; j++)
                            {
                                if (j == k && k != i)
                                {
                                    result[i, k] *= Math.Cos(theta[i * (i - 1) / 2 + j]);
                                }
                                else if (j != i)
                                {
                                    result[i, k] *= Math.Sin(theta[i * (i - 1) / 2 + j]);
                                }
                            }
                        }
                    }
                }
            }
            else
            {
                Vector       theta = new Vector(size * (size - 1));
                const double eps   = 1e-16;
                for (i = 0; i < size; i++)
                {
                    for (j = 0; j < size - 1; j++)
                    {
                        theta[j * size + i] = result[i, j];
                        if (theta[j * size + i] > 1 - eps)
                        {
                            theta[j * size + i] = 1 - eps;
                        }
                        if (theta[j * size + i] < -1 + eps)
                        {
                            theta[j * size + i] = -1 + eps;
                        }
                        for (k = 0; k < j; k++)
                        {
                            theta[j * size + i] /= Math.Sin(theta[k * size + i]);
                            if (theta[j * size + i] > 1 - eps)
                            {
                                theta[j * size + i] = 1 - eps;
                            }
                            if (theta[j * size + i] < -1 + eps)
                            {
                                theta[j * size + i] = -1 + eps;
                            }
                        }
                        theta[j * size + i] = Math.Acos(theta[j * size + i]);
                        if (j == size - 2)
                        {
                            if (result[i, j + 1] < 0)
                            {
                                theta[j * size + i] = -theta[j * size + i];
                            }
                        }
                    }
                }
                Problem p = new Problem(costFunction, constraint, theta);
                optimize.minimize(p, endCriteria);
                theta = p.currentValue();
                result.fill(1);
                for (i = 0; i < size; i++)
                {
                    for (k = 0; k < size; k++)
                    {
                        for (j = 0; j <= k; j++)
                        {
                            if (j == k && k != size - 1)
                            {
                                result[i, k] *= Math.Cos(theta[j * size + i]);
                            }
                            else if (j != size - 1)
                            {
                                result[i, k] *= Math.Sin(theta[j * size + i]);
                            }
                        }
                    }
                }
            }

            for (i = 0; i < size; i++)
            {
                for (j = 0; j < size; j++)
                {
                    result[i, j] *= variance[i];
                }
            }
            return result;
        }
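The theta loops implement a spherical-coordinates parameterization: each row of the scaled root is written as a product of sines and cosines of the angles, so every row automatically stays on the unit hypersphere while ConjugateGradient searches over the unconstrained angles; the rows are rescaled by the variances at the end. A standalone sketch of the forward map for a single row follows; rowFromAngles is a hypothetical helper, not part of the QLNet code.

        // sketch: the angle-to-row map used above, for one row of length n
        private static double[] rowFromAngles(double[] theta)   // theta.Length == n - 1
        {
            int n = theta.Length + 1;
            double[] row = new double[n];
            for (int k = 0; k < n; k++)
            {
                double v = 1.0;
                for (int j = 0; j < k; j++)
                {
                    v *= Math.Sin(theta[j]);     // product of sines of all preceding angles
                }
                if (k < n - 1)
                {
                    v *= Math.Cos(theta[k]);     // cosine of the current angle (omitted for the last entry)
                }
                row[k] = v;
            }
            return row;                          // the row has unit Euclidean norm by construction
        }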
Example #4
        public override void update()
        {
            this.coeff_.updateModelInstance();

            // we should also check that y contains positive values only

            // we must update weights if it is vegaWeighted
            if (vegaWeighted_)
            {
                coeff_.weights_.Clear();
                double weightsSum = 0.0;

                for (int i = 0; i < xBegin_.Count; i++)
                {
                    double stdDev = Math.Sqrt((yBegin_[i]) * (yBegin_[i]) * this.coeff_.t_);
                    coeff_.weights_.Add(coeff_.model_.weight(xBegin_[i], forward_, stdDev, this.coeff_.addParams_));
                    weightsSum += coeff_.weights_.Last();
                }

                // weight normalization
                for (int i = 0; i < coeff_.weights_.Count; i++)
                {
                    coeff_.weights_[i] /= weightsSum;
                }
            }

            // there is nothing to optimize
            if (coeff_.paramIsFixed_.Aggregate((a, b) => b && a))
            {
                coeff_.error_           = interpolationError();
                coeff_.maxError_        = interpolationMaxError();
                coeff_.XABREndCriteria_ = EndCriteria.Type.None;
                return;
            }
            else
            {
                XABRError costFunction = new XABRError(this);

                Vector guess = new Vector(coeff_.model_.dimension());
                for (int i = 0; i < guess.size(); ++i)
                {
                    guess[i] = coeff_.params_[i].Value;
                }

                int    iterations     = 0;
                int    freeParameters = 0;
                double bestError      = double.MaxValue;
                Vector bestParameters = new Vector();
                for (int i = 0; i < coeff_.model_.dimension(); ++i)
                {
                    if (!coeff_.paramIsFixed_[i])
                    {
                        ++freeParameters;
                    }
                }
                HaltonRsg        halton = new HaltonRsg(freeParameters, 42);
                EndCriteria.Type tmpEndCriteria;
                double           tmpInterpolationError;

                do
                {
                    if (iterations > 0)
                    {
                        Sample <List <double> > s = halton.nextSequence();
                        coeff_.model_.guess(guess, coeff_.paramIsFixed_, forward_, coeff_.t_, s.value, coeff_.addParams_);
                        for (int i = 0; i < coeff_.paramIsFixed_.Count; ++i)
                        {
                            if (coeff_.paramIsFixed_[i])
                            {
                                guess[i] = coeff_.params_[i].Value;
                            }
                        }
                    }

                    Vector inversedTransformatedGuess = new Vector(coeff_.model_.inverse(guess, coeff_.paramIsFixed_, coeff_.params_, forward_));

                    ProjectedCostFunction constrainedXABRError = new ProjectedCostFunction(costFunction, inversedTransformatedGuess,
                                                                                           coeff_.paramIsFixed_);

                    Vector projectedGuess = new Vector(constrainedXABRError.project(inversedTransformatedGuess));

                    NoConstraint constraint = new NoConstraint();
                    Problem      problem    = new Problem(constrainedXABRError, constraint, projectedGuess);
                    tmpEndCriteria = optMethod_.minimize(problem, endCriteria_);
                    Vector projectedResult = new Vector(problem.currentValue());
                    Vector transfResult    = new Vector(constrainedXABRError.include(projectedResult));
                    Vector result          = coeff_.model_.direct(transfResult, coeff_.paramIsFixed_, coeff_.params_, forward_);
                    tmpInterpolationError = useMaxError_ ? interpolationMaxError()
                                                     : interpolationError();

                    if (tmpInterpolationError < bestError)
                    {
                        bestError               = tmpInterpolationError;
                        bestParameters          = result;
                        coeff_.XABREndCriteria_ = tmpEndCriteria;
                    }
                } while (++iterations < maxGuesses_ &&
                         tmpInterpolationError > errorAccept_);

                for (int i = 0; i < bestParameters.size(); ++i)
                {
                    coeff_.params_[i] = bestParameters[i];
                }

                coeff_.error_    = interpolationError();
                coeff_.maxError_ = interpolationMaxError();
            }
        }
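The do/while is a multi-start calibration: the first pass starts from the caller-supplied guess, every later pass draws a fresh starting point from the Halton sequence, and the best parameter set over all passes is kept; the loop stops once the error drops below errorAccept_ or maxGuesses_ attempts have been made. Stripped of the XABR-specific transformations, the skeleton looks roughly like this; nextStartingPoint and errorFor are hypothetical stand-ins for the model-specific steps.

            // sketch of the multi-start loop, with the model-specific steps elided
            double bestError      = double.MaxValue;
            Vector bestParameters = new Vector();
            int    iterations     = 0;
            double currentError;
            do
            {
                Vector start = nextStartingPoint(iterations);          // hypothetical: guess first, then Halton draws
                Problem problem = new Problem(costFunction, new NoConstraint(), start);
                EndCriteria.Type ec = optMethod_.minimize(problem, endCriteria_);
                currentError = errorFor(problem.currentValue());       // hypothetical error measure
                if (currentError < bestError)
                {
                    bestError      = currentError;
                    bestParameters = problem.currentValue();
                    // remember ec as the reported end-criteria type, as the example does
                }
            } while (++iterations < maxGuesses_ && currentError > errorAccept_);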
Example #5
        public void calculate()
        {
            validCurve_ = false;
            int nInsts = ts_.instruments_.Count, i;

            // ensure rate helpers are sorted
            ts_.instruments_.Sort((x, y) => x.latestDate().CompareTo(y.latestDate()));

            // check that no two instruments have the same maturity
            for (i = 1; i < nInsts; ++i)
            {
                Date m1 = ts_.instruments_[i - 1].latestDate(),
                     m2 = ts_.instruments_[i].latestDate();
                if (m1 == m2)
                {
                    throw new ArgumentException("two instruments have the same maturity (" + m1 + ")");
                }
            }

            // check that there are no instruments with an invalid quote
            if ((i = ts_.instruments_.FindIndex(x => !x.quoteIsValid())) != -1)
            {
                throw new ArgumentException("instrument " + i + " (maturity: " + ts_.instruments_[i].latestDate() +
                                            ") has an invalid quote");
            }

            // setup instruments and register with them
            ts_.instruments_.ForEach(j => j.setTermStructure(ts_));

            // set initial guess only if the current curve cannot be used as guess
            if (validCurve_)
            {
                if (ts_.data_.Count != nInsts + 1)
                {
                    throw new ArgumentException("dimension mismatch: expected " + nInsts + 1 + ", actual " + ts_.data_.Count);
                }
            }
            else
            {
                ts_.data_    = new InitializedList <double>(nInsts + 1);
                ts_.data_[0] = ts_.initialValue(ts_);
            }

            // calculate dates and times
            ts_.dates_    = new InitializedList <Date>(nInsts + 1);
            ts_.times_    = new InitializedList <double>(nInsts + 1);
            ts_.dates_[0] = ts_.initialDate(ts_);
            ts_.times_[0] = ts_.timeFromReference(ts_.dates_[0]);
            for (i = 0; i < nInsts; ++i)
            {
                ts_.dates_[i + 1] = ts_.instruments_[i].latestDate();
                ts_.times_[i + 1] = ts_.timeFromReference(ts_.dates_[i + 1]);
                if (!validCurve_)
                {
                    ts_.data_[i + 1] = ts_.data_[i];
                }
            }

            LevenbergMarquardt solver           = new LevenbergMarquardt(ts_.accuracy_, ts_.accuracy_, ts_.accuracy_);
            EndCriteria        endCriteria      = new EndCriteria(100, 10, 0.00, ts_.accuracy_, 0.00);
            PositiveConstraint posConstraint    = new PositiveConstraint();
            NoConstraint       noConstraint     = new NoConstraint();
            Constraint         solverConstraint = forcePositive_ ? (Constraint)posConstraint : (Constraint)noConstraint;

            // now start the bootstrapping.
            int iInst = localisation_ - 1;

            int dataAdjust = (ts_.interpolator_ as ConvexMonotone).dataSizeAdjustment;

            do
            {
                int    initialDataPt = iInst + 1 - localisation_ + dataAdjust;
                Vector startArray    = new Vector(localisation_ + 1 - dataAdjust);
                for (int j = 0; j < startArray.size() - 1; ++j)
                {
                    startArray[j] = ts_.data_[initialDataPt + j];
                }

                // here we are extending the interpolation a point at a
                // time... but the local interpolator can make an
                // approximation for the final localisation period.
                // e.g. if the localisation is 2, then the first section
                // of the curve will be solved using the first 2
                // instruments... with the local interpolator making
                // suitable boundary conditions.
                ts_.interpolation_ = (ts_.interpolator_ as ConvexMonotone).localInterpolate(ts_.times_, iInst + 2, ts_.data_,
                                                                                            localisation_, ts_.interpolation_ as ConvexMonotoneInterpolation, nInsts + 1);

                if (iInst >= localisation_)
                {
                    startArray[localisation_ - dataAdjust] = ts_.guess(ts_, ts_.dates_[iInst]);
                }
                else
                {
                    startArray[localisation_ - dataAdjust] = ts_.data_[0];
                }

                var currentCost = new PenaltyFunction <PiecewiseYieldCurve>(ts_, initialDataPt, ts_.instruments_,
                                                                            iInst - localisation_ + 1, iInst + 1);
                Problem          toSolve = new Problem(currentCost, solverConstraint, startArray);
                EndCriteria.Type endType = solver.minimize(toSolve, endCriteria);

                // check the end criteria
                if (!(endType == EndCriteria.Type.StationaryFunctionAccuracy ||
                      endType == EndCriteria.Type.StationaryFunctionValue))
                {
                    throw new ApplicationException("Unable to strip yieldcurve to required accuracy ");
                }
                ++iInst;
            } while (iInst < nInsts);

            validCurve_ = true;
        }
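Each pass of the do/while optimizes only a local window of curve points: the window starts at index iInst + 1 - localisation_ + dataAdjust and holds localisation_ + 1 - dataAdjust points, with the final point seeded from ts_.guess() (or from data_[0] on the very first pass). A tiny sketch of the window arithmetic, using purely illustrative values localisation_ = 2 and dataAdjust = 1:

            // sketch: which data_ points each bootstrap step optimizes
            // the localisation and dataAdjust values below are illustrative only
            int localisation = 2, dataAdjust = 1;
            for (int iInst = localisation - 1; iInst < 5; ++iInst)
            {
                int initialDataPt = iInst + 1 - localisation + dataAdjust;
                int windowLength  = localisation + 1 - dataAdjust;
                Console.WriteLine("instrument " + iInst + ": optimize data_[" + initialDataPt +
                                  ".." + (initialDataPt + windowLength - 1) + "]");
            }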
Example #6
        public void compute()
        {
            if (vegaWeighted_)
            {
                double weightsSum = 0.0;
                for (int i = 0; i < times_.Count; i++)
                {
                    double stdDev = Math.Sqrt(blackVols_[i] * blackVols_[i] * times_[i]);
                    // when strike==forward, blackFormulaStdDevDerivative is proportional to the normal density at stdDev/2
                    weights_[i] = new CumulativeNormalDistribution().derivative(.5 * stdDev);
                    weightsSum += weights_[i];
                }
                // weight normalization
                for (int i = 0; i < times_.Count; i++)
                {
                    weights_[i] /= weightsSum;
                }
            }
            // there is nothing to optimize
            if (aIsFixed_ && bIsFixed_ && cIsFixed_ && dIsFixed_)
            {
                abcdEndCriteria_ = QLNet.EndCriteria.Type.None;
                return;
            }
            else
            {
                AbcdError costFunction = new AbcdError(this);
                transformation_ = new AbcdParametersTransformation();

                Vector guess = new Vector(4);
                guess[0] = a_;
                guess[1] = b_;
                guess[2] = c_;
                guess[3] = d_;

                List <bool> parameterAreFixed = new InitializedList <bool>(4);
                parameterAreFixed[0] = aIsFixed_;
                parameterAreFixed[1] = bIsFixed_;
                parameterAreFixed[2] = cIsFixed_;
                parameterAreFixed[3] = dIsFixed_;

                Vector inversedTransformatedGuess = new Vector(transformation_.inverse(guess));

                ProjectedCostFunction projectedAbcdCostFunction = new ProjectedCostFunction(costFunction,
                                                                                            inversedTransformatedGuess, parameterAreFixed);

                Vector projectedGuess = new Vector(projectedAbcdCostFunction.project(inversedTransformatedGuess));

                NoConstraint constraint = new NoConstraint();
                Problem      problem    = new Problem(projectedAbcdCostFunction, constraint, projectedGuess);
                abcdEndCriteria_ = optMethod_.minimize(problem, endCriteria_);
                Vector projectedResult = new Vector(problem.currentValue());
                Vector transfResult    = new Vector(projectedAbcdCostFunction.include(projectedResult));

                Vector result = transformation_.direct(transfResult);
                a_ = result[0];
                b_ = result[1];
                c_ = result[2];
                d_ = result[3];
                // validate the calibrated parameters
                QLNet.AbcdMathFunction.validate(a_, b_, c_, d_);
            }
        }
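The fixed-parameter handling is done by ProjectedCostFunction: project drops the fixed coordinates before the optimizer ever sees the guess, and include re-inserts them into the optimizer's result, so only the free parameters are searched. A compact sketch of that round trip, reusing the costFunction from the example above; the guess values are illustrative and the transformation step is omitted.

                // sketch: projecting out fixed parameters and restoring them afterwards
                Vector fullGuess = new Vector(4, 0.1);              // a, b, c, d (illustrative values)
                List<bool> isFixed = new InitializedList<bool>(4);  // all free by default
                isFixed[3] = true;                                  // keep d fixed

                ProjectedCostFunction projected = new ProjectedCostFunction(costFunction, fullGuess, isFixed);
                Vector freeGuess = projected.project(fullGuess);    // length 3: only a, b, c remain
                // ... minimize over freeGuess ...
                Vector fullResult = projected.include(freeGuess);   // length 4 again, with d restored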