Example #1
        public override void update()
        {
            coeff_.updateModelInstance();

            // we should also check that y contains positive values only

            // weights must be recomputed if the interpolation is vega-weighted
            if (vegaWeighted_)
            {
                coeff_.weights_.Clear();
                double weightsSum = 0.0;

                for (int i = 0; i < xBegin_.Count; i++)
                {
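                    // yBegin_ holds the market (Black) volatilities, so stdDev = vol * sqrt(t);
                    // the model-specific vega weight is evaluated at that total standard deviation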
                    double stdDev = Math.Sqrt(yBegin_[i] * yBegin_[i] * coeff_.t_);
                    coeff_.weights_.Add(coeff_.model_.weight(xBegin_[i], forward_, stdDev, coeff_.addParams_));
                    weightsSum += coeff_.weights_.Last();
                }

                // weight normalization
                for (int i = 0; i < coeff_.weights_.Count; i++)
                {
                    coeff_.weights_[i] /= weightsSum;
                }
            }

            // there is nothing to optimize
            if (coeff_.paramIsFixed_.Aggregate((a, b) => b && a))
            {
                coeff_.error_           = interpolationError();
                coeff_.maxError_        = interpolationMaxError();
                coeff_.XABREndCriteria_ = EndCriteria.Type.None;
                return;
            }

            XABRError costFunction = new XABRError(this);

            Vector guess = new Vector(coeff_.model_.dimension());

            for (int i = 0; i < guess.size(); ++i)
            {
                guess[i] = coeff_.params_[i].GetValueOrDefault();
            }

            int    iterations     = 0;
            int    freeParameters = 0;
            double bestError      = double.MaxValue;
            Vector bestParameters = new Vector();

            for (int i = 0; i < coeff_.model_.dimension(); ++i)
            {
                if (!coeff_.paramIsFixed_[i])
                {
                    ++freeParameters;
                }
            }
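            // Halton draws give deterministic, well-spread restart points for the multi-start
            // search below; the fixed seed (42) keeps the calibration reproducible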
            HaltonRsg halton = new HaltonRsg(freeParameters, 42);

            EndCriteria.Type tmpEndCriteria;
            double           tmpInterpolationError;

            do
            {
                if (iterations > 0)
                {
                    Sample<List<double>> s = halton.nextSequence();
                    coeff_.model_.guess(guess, coeff_.paramIsFixed_, forward_, coeff_.t_, s.value, coeff_.addParams_);
                    for (int i = 0; i < coeff_.paramIsFixed_.Count; ++i)
                    {
                        if (coeff_.paramIsFixed_[i])
                        {
                            guess[i] = coeff_.params_[i].GetValueOrDefault();
                        }
                    }
                }

                Vector inversedTransformatedGuess =
                    new Vector(coeff_.model_.inverse(guess, coeff_.paramIsFixed_, coeff_.params_, forward_));

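                // wrap the cost function so the fixed parameters are projected out;
                // the optimizer then searches only over the free parameters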
                ProjectedCostFunction constrainedXABRError = new ProjectedCostFunction(costFunction,
                                                                                       inversedTransformatedGuess,
                                                                                       coeff_.paramIsFixed_);

                Vector projectedGuess = new Vector(constrainedXABRError.project(inversedTransformatedGuess));

                constraint_.config(constrainedXABRError, coeff_, forward_);
                Problem problem = new Problem(constrainedXABRError, constraint_, projectedGuess);
                tmpEndCriteria = optMethod_.minimize(problem, endCriteria_);
                Vector projectedResult = new Vector(problem.currentValue());
                Vector transfResult    = new Vector(constrainedXABRError.include(projectedResult));
                Vector result          = coeff_.model_.direct(transfResult, coeff_.paramIsFixed_, coeff_.params_, forward_);
                tmpInterpolationError = useMaxError_
                                    ? interpolationMaxError()
                                    : interpolationError();

                if (tmpInterpolationError < bestError)
                {
                    bestError               = tmpInterpolationError;
                    bestParameters          = result;
                    coeff_.XABREndCriteria_ = tmpEndCriteria;
                }
            } while (++iterations < maxGuesses_ &&
                     tmpInterpolationError > errorAccept_);

            for (int i = 0; i < bestParameters.size(); ++i)
            {
                coeff_.params_[i] = bestParameters[i];
            }

            coeff_.error_    = interpolationError();
            coeff_.maxError_ = interpolationMaxError();
        }
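
The do/while above is a multi-start calibration: after the first pass it reseeds the free parameters from the Halton sequence, reruns the local optimizer, and keeps the parameter set with the lowest interpolation error (maximum or aggregate, depending on useMaxError_), stopping early once that error falls below errorAccept_ or maxGuesses_ restarts have been used. The self-contained sketch below shows the same pattern on a toy one-dimensional cost function; Halton, Refine and the cost itself are illustrative stand-ins, not QLCore code.

using System;

static class MultiStartSketch
{
    // van der Corput sequence in base 2: the 1-D building block of a Halton sequence
    static double Halton(int index, int b = 2)
    {
        double f = 1.0, r = 0.0;
        for (int i = index; i > 0; i /= b)
        {
            f /= b;
            r += f * (i % b);
        }
        return r;
    }

    // toy local search: move toward the cheaper neighbour, shrink the step otherwise
    static double Refine(Func<double, double> cost, double x)
    {
        double step = 0.05;
        for (int k = 0; k < 60; ++k)
        {
            if (cost(x + step) < cost(x)) x += step;
            else if (cost(x - step) < cost(x)) x -= step;
            else step *= 0.5;
        }
        return x;
    }

    static void Main()
    {
        // bumpy toy cost with several local minima near x = 0.7
        Func<double, double> cost = x => (x - 0.7) * (x - 0.7) + 0.05 * Math.Sin(25 * x);

        double errorAccept = 1e-3;            // plays the role of errorAccept_
        int maxGuesses = 20;                  // plays the role of maxGuesses_
        double bestError = double.MaxValue, bestX = 0.0;

        for (int iteration = 0; iteration < maxGuesses; ++iteration)
        {
            double start = Halton(iteration + 1);      // deterministic restart point in (0,1)
            double candidate = Refine(cost, start);
            double error = cost(candidate);

            if (error < bestError) { bestError = error; bestX = candidate; }
            if (bestError <= errorAccept) break;       // early exit once the fit is good enough
        }

        Console.WriteLine($"best x = {bestX:F4}, cost = {bestError:F5}");
    }
}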
Example #2
public virtual void config<Model>(ProjectedCostFunction costFunction, XABRCoeffHolder<Model> coeff,
                                  double forward)
    where Model : IModel, new()
{
}
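
This config hook is deliberately empty: the base XABR constraint does nothing, and model-specific constraints override it to pick up calibration context (the projected cost function, the coefficient holder and the forward) before the optimizer in Example #1 calls minimize(). Below is a minimal sketch of that override pattern; XabrStyleConstraint, ForwardAwareConstraint, CostFn and CoeffHolder are stand-in names, not the QLCore API.

// Minimal sketch of the no-op-plus-override hook pattern, using stand-in types.
public class CostFn { }

public class CoeffHolder<TModel>
{
    public double Forward;                    // context a derived constraint may want to read
}

public class XabrStyleConstraint
{
    // no-op by default, mirroring config<Model>() above
    public virtual void config<TModel>(CostFn costFunction, CoeffHolder<TModel> coeff, double forward)
        where TModel : new()
    { }
}

public class ForwardAwareConstraint : XabrStyleConstraint
{
    // a derived constraint captures calibration context before the optimizer runs
    // (constraints on TModel are inherited from the base declaration)
    public override void config<TModel>(CostFn costFunction, CoeffHolder<TModel> coeff, double forward)
    {
        coeff.Forward = forward;
    }
}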
Example #3
        public void compute()
        {
            if (vegaWeighted_)
            {
                double weightsSum = 0.0;
                for (int i = 0; i < times_.Count; i++)
                {
                    double stdDev = Math.Sqrt(blackVols_[i] * blackVols_[i] * times_[i]);
                    // when strike==forward, blackFormulaStdDevDerivative reduces to forward * N'(0.5 * stdDev);
                    // the common forward factor cancels in the normalization below, so only N'(0.5 * stdDev) is kept
                    weights_[i] = new CumulativeNormalDistribution().derivative(.5 * stdDev);
                    weightsSum += weights_[i];
                }
                // weight normalization
                for (int i = 0; i < times_.Count; i++)
                {
                    weights_[i] /= weightsSum;
                }
            }
            // there is nothing to optimize
            if (aIsFixed_ && bIsFixed_ && cIsFixed_ && dIsFixed_)
            {
                abcdEndCriteria_ = QLCore.EndCriteria.Type.None;
                return;
            }
            else
            {
                AbcdError costFunction = new AbcdError(this);
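                // the transformation maps the optimizer's unconstrained space onto the
                // admissible abcd parameter region (inverse() below, direct() at the end)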
                transformation_ = new AbcdParametersTransformation();

                Vector guess = new Vector(4);
                guess[0] = a_;
                guess[1] = b_;
                guess[2] = c_;
                guess[3] = d_;

                List<bool> parameterAreFixed = new InitializedList<bool>(4);
                parameterAreFixed[0] = aIsFixed_;
                parameterAreFixed[1] = bIsFixed_;
                parameterAreFixed[2] = cIsFixed_;
                parameterAreFixed[3] = dIsFixed_;

                Vector inversedTransformatedGuess = new Vector(transformation_.inverse(guess));

                ProjectedCostFunction projectedAbcdCostFunction = new ProjectedCostFunction(costFunction,
                                                                                            inversedTransformatedGuess, parameterAreFixed);

                Vector projectedGuess = new Vector(projectedAbcdCostFunction.project(inversedTransformatedGuess));

                NoConstraint constraint = new NoConstraint();
                Problem      problem    = new Problem(projectedAbcdCostFunction, constraint, projectedGuess);
                abcdEndCriteria_ = optMethod_.minimize(problem, endCriteria_);
                Vector projectedResult = new Vector(problem.currentValue());
                Vector transfResult    = new Vector(projectedAbcdCostFunction.include(projectedResult));

                Vector result = transformation_.direct(transfResult);
                // validate the calibrated parameters before committing them
                QLCore.AbcdMathFunction.validate(result[0], result[1], result[2], result[3]);
                a_ = result[0];
                b_ = result[1];
                c_ = result[2];
                d_ = result[3];
            }
        }
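
For reference, the function calibrated in Example #3 is the abcd instantaneous volatility sigma(t) = (a + b*t) * exp(-c*t) + d, where d is the long-time level and a + d the value at t = 0. The sketch below simply evaluates that formula on a small grid; the parameter values are illustrative only, not the output of a calibration.

using System;

static class AbcdSketch
{
    // sigma(t) = (a + b*t) * exp(-c*t) + d
    static double Abcd(double t, double a, double b, double c, double d)
        => (a + b * t) * Math.Exp(-c * t) + d;

    static void Main()
    {
        // illustrative parameter values (not calibration output)
        double a = -0.06, b = 0.17, c = 0.54, d = 0.17;

        foreach (double t in new[] { 0.25, 1.0, 2.0, 5.0, 10.0 })
            Console.WriteLine($"t = {t,5:F2}   sigma(t) = {Abcd(t, a, b, c, d):F4}");
    }
}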