Example #1
            private double cornishFisherEps(HestonProcess process, double nu_0, double nu_t, double dt, double eps)
            {
                // use the moment generating function to get the
                // first, second, third and fourth moments of the distribution
                double d   = 1e-2;
                double p2  = Phi(process, new Complex(0, -2 * d), nu_0, nu_t, dt).Real;
                double p1  = Phi(process, new Complex(0, -d), nu_0, nu_t, dt).Real;
                double p0  = Phi(process, new Complex(0, 0), nu_0, nu_t, dt).Real;
                double pm1 = Phi(process, new Complex(0, d), nu_0, nu_t, dt).Real;
                double pm2 = Phi(process, new Complex(0, 2 * d), nu_0, nu_t, dt).Real;

                double avg    = (pm2 - 8 * pm1 + 8 * p1 - p2) / (12 * d);
                double m2     = (-pm2 + 16 * pm1 - 30 * p0 + 16 * p1 - p2) / (12 * d * d);
                double var    = m2 - avg * avg;
                double stdDev = Math.Sqrt(var);

                double m3   = (-0.5 * pm2 + pm1 - p1 + 0.5 * p2) / (d * d * d);
                double skew = (m3 - 3 * var * avg - avg * avg * avg) / (var * stdDev);

                double m4   = (pm2 - 4 * pm1 + 6 * p0 - 4 * p1 + p2) / (d * d * d * d);
                double kurt = (m4 - 4 * m3 * avg + 6 * m2 * avg * avg - 3 * avg * avg * avg * avg) / (var * var);

                // use the Cornish-Fisher expansion to come up with an improved
                // estimate of the quantile u_\eps with 1-F(u_\eps) < \eps
                double q = new InverseCumulativeNormal().value(1 - eps);
                double w = q + (q * q - 1) / 6 * skew + (q * q * q - 3 * q) / 24 * (kurt - 3)
                           - (2 * q * q * q - 5 * q) / 36 * skew * skew;

                return(avg + w * stdDev);
            }
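
The five Phi evaluations above sample the moment generating function (the characteristic function taken at imaginary arguments), and the stencils are fourth-order central differences for its first four derivatives at zero. Below is a minimal standalone check of those stencils, assuming a standard normal whose MGF is M(t) = exp(t^2/2); the class and helper names are illustrative, not QLNet code. For this distribution skew = 0 and kurt = 3, so w collapses to q and the Cornish-Fisher estimate reduces to the exact Gaussian quantile avg + q * stdDev.

using System;

class MomentStencilCheck
{
    // MGF of a standard normal; Phi(process, new Complex(0, -k*d), ...) above
    // plays the role of M(k*d).
    static double M(double t) { return Math.Exp(0.5 * t * t); }

    static void Main()
    {
        double d = 1e-2;
        double p2  = M(2 * d), p1 = M(d), p0 = M(0);
        double pm1 = M(-d), pm2 = M(-2 * d);

        // same stencils as cornishFisherEps; exact values are 0, 1, 0, 3
        double avg = (pm2 - 8 * pm1 + 8 * p1 - p2) / (12 * d);
        double m2  = (-pm2 + 16 * pm1 - 30 * p0 + 16 * p1 - p2) / (12 * d * d);
        double m3  = (-0.5 * pm2 + pm1 - p1 + 0.5 * p2) / (d * d * d);
        double m4  = (pm2 - 4 * pm1 + 6 * p0 - 4 * p1 + p2) / (d * d * d * d);

        Console.WriteLine($"{avg:E2} {m2:F6} {m3:E2} {m4:F6}");
    }
}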
        /*! gaussian-assumption y-th percentile
            \pre percentile must be in range (0%-100%), extremes excluded
         */
        public double gaussianPercentile(double percentile)
        {
            Utils.QL_REQUIRE(percentile > 0.0 && percentile < 1.0, () => "percentile (" + percentile + ") must be in (0.0, 1.0)");

            InverseCumulativeNormal gInverse = new InverseCumulativeNormal(mean(), standardDeviation());

            return(gInverse.value(percentile));
        }
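
Since the inverse is constructed with the distribution's mean and standard deviation, the call above is the closed form mu + sigma * N^{-1}(p). A short usage sketch with illustrative numbers, assuming QLNet's InverseCumulativeNormal as used throughout this page:

// 95th percentile of a N(0.01, 0.2^2) return distribution:
InverseCumulativeNormal gInverse = new InverseCumulativeNormal(0.01, 0.2);
double p95 = gInverse.value(0.95);   // 0.01 + 0.2 * 1.6449 ≈ 0.3390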
        public FdmHestonLocalVolatilityVarianceMesher(int size,
                                                      HestonProcess process,
                                                      LocalVolTermStructure leverageFct,
                                                      double maturity,
                                                      int tAvgSteps  = 10,
                                                      double epsilon = 0.0001)
            : base(size)
        {
            leverageFct_ = leverageFct;
            FdmHestonVarianceMesher mesher = new FdmHestonVarianceMesher(size, process, maturity, tAvgSteps, epsilon);

            for (int i = 0; i < size; ++i)
            {
                dplus_[i]     = mesher.dplus(i);
                dminus_[i]    = mesher.dminus(i);
                locations_[i] = mesher.location(i);
            }

            volaEstimate_ = mesher.volaEstimate();

            if (leverageFct != null)
            {
                double s0 = process.s0().currentLink().value();

                List <double> acc = new List <double>();
                acc.Add(leverageFct.localVol(0.0, s0, true));

                Handle <YieldTermStructure> rTS = process.riskFreeRate();
                Handle <YieldTermStructure> qTS = process.dividendYield();

                for (int l = 1; l <= tAvgSteps; ++l)
                {
                    double t = (maturity * l) / tAvgSteps;
                    double vol = volaEstimate_ * acc.Average();
                    double fwd = s0 * qTS.currentLink().discount(t) / rTS.currentLink().discount(t);
                    int    sAvgSteps = 50;
                    Vector u = new Vector(sAvgSteps), sig = new Vector(sAvgSteps);

                    for (int i = 0; i < sAvgSteps; ++i)
                    {
                        u[i] = epsilon + ((1.0 - 2.0 * epsilon) / (sAvgSteps - 1.0)) * i;
                        double x  = new InverseCumulativeNormal().value(u[i]);
                        double gf = x * vol * Math.Sqrt(t);
                        double f  = fwd * Math.Exp(gf);
                        sig[i] = Math.Pow(leverageFct.localVol(t, f, true), 2.0);
                    }

                    double leverageAvg = new GaussLobattoIntegral(10000, 1E-4).value(new interpolated_volatility(u, sig).value,
                                                                                     u.First(),
                                                                                     u.Last())
                                         / (1.0 - 2.0 * epsilon);

                    acc.Add(leverageAvg);
                }

                volaEstimate_ *= acc.Average();
            }
        }
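
The inner loop above averages the squared leverage over the lognormal distribution of the forward: u is a uniform grid of quantiles on (epsilon, 1 - epsilon), each mapped through the inverse normal CDF to a forward level. A minimal sketch of the same averaging idea, with a toy leverage function and a trapezoid rule standing in for GaussLobattoIntegral (both are assumptions, not QLNet code):

using System;
using QLNet;

class LeverageAverageSketch
{
    // toy stand-in for Math.Pow(leverageFct.localVol(t, f, true), 2.0)
    static double LeverageSqr(double f) { return Math.Pow(1.0 + 0.1 * Math.Log(f / 100.0), 2.0); }

    static void Main()
    {
        double eps = 0.0001, fwd = 100.0, vol = 0.2, t = 1.0;
        int n = 50;
        double[] u = new double[n], sig = new double[n];
        InverseCumulativeNormal inv = new InverseCumulativeNormal();

        for (int i = 0; i < n; ++i)
        {
            u[i] = eps + ((1.0 - 2.0 * eps) / (n - 1.0)) * i;        // quantile grid
            double f = fwd * Math.Exp(inv.value(u[i]) * vol * Math.Sqrt(t));
            sig[i] = LeverageSqr(f);
        }

        double integral = 0.0;                                       // trapezoid rule
        for (int i = 1; i < n; ++i)
            integral += 0.5 * (sig[i] + sig[i - 1]) * (u[i] - u[i - 1]);

        Console.WriteLine(integral / (1.0 - 2.0 * eps));             // avg squared leverage
    }
}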
        //! gaussian-assumption Expected Shortfall at a given percentile

        /*! Assuming a gaussian distribution, it returns the
         *  expected loss in case the loss exceeds the VaR
         *  threshold, that is, the average of the observations
         *  below the given percentile \f$ p \f$.
         *  Also known as conditional value-at-risk.
         *
         *  See Artzner, Delbaen, Eber and Heath,
         *  "Coherent measures of risk", Mathematical Finance 9 (1999)
         */
        public double gaussianExpectedShortfall(double percentile)
        {
            Utils.QL_REQUIRE(percentile < 1.0 && percentile >= 0.9, () => "percentile (" + percentile + ") out of range [0.9, 1)");

            double m   = this.mean();
            double std = this.standardDeviation();
            InverseCumulativeNormal gInverse = new InverseCumulativeNormal(m, std);
            double var    = gInverse.value(1.0 - percentile);
            NormalDistribution g = new NormalDistribution(m, std);
            double result = m - std * std * g.value(var) / (1.0 - percentile);

            // expectedShortfall must be a loss
            // this means that it has to be MIN(result, 0.0)
            // expectedShortfall must also be a positive quantity, so -MIN(*)
            return(-Math.Min(result, 0.0));
        }
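
The closed form above is ES = -(m - sigma * phi(z) / (1 - p)) clipped at zero, with z the (1-p) quantile, since std * std * g.value(var) equals sigma * phi(z). A quick Monte Carlo cross-check for the standard normal at p = 0.95; the Box-Muller sampling and the seed are illustrative assumptions:

using System;
using System.Linq;

class GaussianEsCheck
{
    static void Main()
    {
        double p = 0.95;
        int n = 1000000;
        Random rng = new Random(42);
        double[] xs = new double[n];

        for (int i = 0; i < n; ++i)         // standard-normal draws via Box-Muller
        {
            double u1 = 1.0 - rng.NextDouble(), u2 = rng.NextDouble();
            xs[i] = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
        }

        Array.Sort(xs);
        int k = (int)(n * (1.0 - p));       // observations below the (1-p) quantile
        double es = -Math.Min(xs.Take(k).Average(), 0.0);

        Console.WriteLine(es);              // closed form: phi(1.6449) / 0.05 ≈ 2.0627
    }
}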
Example #5
        // alternative delta type
        private double strikeFromDelta(double delta, DeltaVolQuote.DeltaType dt)
        {
            double res = 0.0;
            double arg = 0.0;
            InverseCumulativeNormal f = new InverseCumulativeNormal();

            Utils.QL_REQUIRE(delta * phi_ >= 0.0, () => "Option type and delta are incoherent.");

            switch (dt)
            {
            case DeltaVolQuote.DeltaType.Spot:
                Utils.QL_REQUIRE(Math.Abs(delta) <= fDiscount_, () => "Spot delta out of range.");
                arg = -phi_ * f.value(phi_ * delta / fDiscount_) * stdDev_ + 0.5 * stdDev_ * stdDev_;

                res = forward_ * Math.Exp(arg);
                break;

            case DeltaVolQuote.DeltaType.Fwd:
                Utils.QL_REQUIRE(Math.Abs(delta) <= 1.0, () => "Forward delta out of range.");
                arg = -phi_ * f.value(phi_ * delta) * stdDev_ + 0.5 * stdDev_ * stdDev_;

                res = forward_ * Math.Exp(arg);
                break;

            case DeltaVolQuote.DeltaType.PaSpot:
            case DeltaVolQuote.DeltaType.PaFwd:
                // This has to be solved numerically. One of the
                // problems is that the premium-adjusted call delta is
                // not monotonic in strike, so two solutions may
                // occur. The one to the right of the delta's maximum
                // is considered the correct strike. Proper interval
                // bounds for the strike need to be chosen; otherwise
                // the numerics can be very unreliable and unstable.
                // Brent was chosen over Newton since the interval can
                // be specified explicitly, so we cannot stray into
                // the region left of the maximum. The put delta does
                // not have this problem, but it still has to be
                // solved numerically.

                BlackDeltaPremiumAdjustedSolverClass f1 = new BlackDeltaPremiumAdjustedSolverClass(
                    ot_, dt, spot_, dDiscount_, fDiscount_, stdDev_, delta);

                Brent solver = new Brent();
                solver.setMaxEvaluations(1000);
                double accuracy = 1.0e-10;

                double rightLimit = 0.0;
                double leftLimit  = 0.0;

                // The non-premium-adjusted strike always lies to the right of the premium-adjusted one
                if (dt == DeltaVolQuote.DeltaType.PaSpot)
                {
                    rightLimit = strikeFromDelta(delta, DeltaVolQuote.DeltaType.Spot);
                }
                else
                {
                    rightLimit = strikeFromDelta(delta, DeltaVolQuote.DeltaType.Fwd);
                }

                if (phi_ < 0)
                {
                    // if put
                    res = solver.solve(f1, accuracy, rightLimit, 0.0, spot_ * 100.0);
                    break;
                }
                else
                {
                    // find the left limit, i.e. the strike at which
                    // the premium-adjusted delta attains its maximum.

                    BlackDeltaPremiumAdjustedMaxStrikeClass g = new BlackDeltaPremiumAdjustedMaxStrikeClass(
                        ot_, dt, spot_, dDiscount_, fDiscount_, stdDev_);

                    leftLimit = solver.solve(g, accuracy, rightLimit * 0.5, 0.0, rightLimit);

                    double guess = leftLimit + (rightLimit - leftLimit) * 0.5;

                    res = solver.solve(f1, accuracy, guess, leftLimit, rightLimit);
                } // end if phi<0 else

                break;


            default:
                Utils.QL_FAIL("invalid delta type");
                break;
            }

            return(res);
        }
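
For the Spot and Fwd branches the inversion is closed-form: a forward delta phi * N(phi * d1) with d1 = ln(F/K)/stdDev + stdDev/2 inverts to K = F * exp(-phi * N^{-1}(phi * delta) * stdDev + stdDev^2/2), and the Spot case merely rescales delta by the foreign discount factor. A round-trip sketch under assumed market data, using QLNet's CumulativeNormalDistribution and InverseCumulativeNormal:

using System;
using QLNet;

class DeltaRoundTrip
{
    static void Main()
    {
        double fwd = 100.0, strike = 110.0, stdDev = 0.25;
        double phi = 1.0;                                    // call

        CumulativeNormalDistribution N = new CumulativeNormalDistribution();
        InverseCumulativeNormal invN   = new InverseCumulativeNormal();

        double d1    = Math.Log(fwd / strike) / stdDev + 0.5 * stdDev;
        double delta = phi * N.value(phi * d1);              // Black forward delta

        // closed-form inversion, as in the DeltaType.Fwd branch above
        double k = fwd * Math.Exp(-phi * invN.value(phi * delta) * stdDev
                                  + 0.5 * stdDev * stdDev);

        Console.WriteLine($"{delta:F4} -> {k:F8}");          // recovers the 110 strike
    }
}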
        public FdmBlackScholesMultiStrikeMesher(int size,
                                                GeneralizedBlackScholesProcess process,
                                                double maturity,
                                                List <double> strikes,
                                                double eps         = 0.0001,
                                                double scaleFactor = 1.5,
                                                Pair <double?, double?> cPoint = null)
            : base(size)
        {
            double spot = process.x0();

            Utils.QL_REQUIRE(spot > 0.0, () => "negative or null underlying given");

            double d = process.dividendYield().currentLink().discount(maturity)
                       / process.riskFreeRate().currentLink().discount(maturity);
            double minStrike = strikes.Min();
            double maxStrike = strikes.Max();

            double Fmin = spot * spot / maxStrike * d;
            double Fmax = spot * spot / minStrike * d;

            Utils.QL_REQUIRE(Fmin > 0.0, () => "negative forward given");

            // Set the grid boundaries
            double normInvEps = new InverseCumulativeNormal().value(1 - eps);
            double sigmaSqrtTmin
                = process.blackVolatility().currentLink().blackVol(maturity, minStrike)
                  * Math.Sqrt(maturity);
            double sigmaSqrtTmax
                = process.blackVolatility().currentLink().blackVol(maturity, maxStrike)
                  * Math.Sqrt(maturity);

            double xMin
                = Math.Min(0.8 * Math.Log(0.8 * spot * spot / maxStrike),
                           Math.Log(Fmin) - sigmaSqrtTmin * normInvEps * scaleFactor
                           - sigmaSqrtTmin * sigmaSqrtTmin / 2.0);
            double xMax
                = Math.Max(1.2 * Math.Log(0.8 * spot * spot / minStrike),
                           Math.Log(Fmax) + sigmaSqrtTmax * normInvEps * scaleFactor
                           - sigmaSqrtTmax * sigmaSqrtTmax / 2.0);

            Fdm1dMesher helper;

            if (cPoint != null && cPoint.first != null &&
                Math.Log(cPoint.first.Value) >= xMin && Math.Log(cPoint.first.Value) <= xMax)
            {
                helper = new Concentrating1dMesher(xMin, xMax, size,
                                                   new Pair <double?, double?>(Math.Log(cPoint.first.Value), cPoint.second));
            }
            else
            {
                helper = new Uniform1dMesher(xMin, xMax, size);
            }

            locations_ = helper.locations();
            for (int i = 0; i < locations_.Count; ++i)
            {
                dplus_[i]  = helper.dplus(i);
                dminus_[i] = helper.dminus(i);
            }
        }
        public FdmBlackScholesMesher(int size,
                                     GeneralizedBlackScholesProcess process,
                                     double maturity, double strike,
                                     double? xMinConstraint = null,
                                     double? xMaxConstraint = null,
                                     double eps            = 0.0001,
                                     double scaleFactor    = 1.5,
                                     Pair <double?, double?> cPoint = null,
                                     DividendSchedule dividendSchedule = null,
                                     FdmQuantoHelper fdmQuantoHelper   = null,
                                     double spotAdjustment             = 0.0)
            : base(size)
        {
            double S = process.x0();

            Utils.QL_REQUIRE(S > 0.0, () => "negative or null underlying given");

            dividendSchedule = dividendSchedule == null ? new DividendSchedule() : dividendSchedule;
            List <pair_double> intermediateSteps = new List <pair_double>();

            for (int i = 0; i < dividendSchedule.Count &&
                 process.time(dividendSchedule[i].date()) <= maturity; ++i)
            {
                intermediateSteps.Add(
                    new pair_double(
                        process.time(dividendSchedule[i].date()),
                        dividendSchedule[i].amount()
                        ));
            }

            int intermediateTimeSteps = (int)Math.Max(2, 24.0 * maturity);

            for (int i = 0; i < intermediateTimeSteps; ++i)
            {
                intermediateSteps.Add(
                    new pair_double((i + 1) * (maturity / intermediateTimeSteps), 0.0));
            }

            intermediateSteps.Sort();

            Handle <YieldTermStructure> rTS = process.riskFreeRate();
            Handle <YieldTermStructure> qTS =
                fdmQuantoHelper != null
                ? new Handle <YieldTermStructure>(
                      new QuantoTermStructure(process.dividendYield(),
                                              process.riskFreeRate(),
                                              new Handle <YieldTermStructure>(fdmQuantoHelper.foreignTermStructure()),
                                              process.blackVolatility(),
                                              strike,
                                              new Handle <BlackVolTermStructure>(fdmQuantoHelper.fxVolatilityTermStructure()),
                                              fdmQuantoHelper.exchRateATMlevel(),
                                              fdmQuantoHelper.equityFxCorrelation()))
                : process.dividendYield();

            double lastDivTime = 0.0;
            double fwd = S + spotAdjustment;
            double mi = fwd, ma = fwd;

            for (int i = 0; i < intermediateSteps.Count; ++i)
            {
                double divTime   = intermediateSteps[i].first;
                double divAmount = intermediateSteps[i].second;

                fwd = fwd / rTS.currentLink().discount(divTime) * rTS.currentLink().discount(lastDivTime)
                      * qTS.currentLink().discount(divTime) / qTS.currentLink().discount(lastDivTime);

                mi = Math.Min(mi, fwd); ma = Math.Max(ma, fwd);

                fwd -= divAmount;

                mi = Math.Min(mi, fwd); ma = Math.Max(ma, fwd);

                lastDivTime = divTime;
            }

            // Set the grid boundaries
            double normInvEps = new InverseCumulativeNormal().value(1 - eps);
            double sigmaSqrtT
                = process.blackVolatility().currentLink().blackVol(maturity, strike)
                  * Math.Sqrt(maturity);

            double? xMin = Math.Log(mi) - sigmaSqrtT * normInvEps * scaleFactor;
            double? xMax = Math.Log(ma) + sigmaSqrtT * normInvEps * scaleFactor;

            if (xMinConstraint != null)
            {
                xMin = xMinConstraint;
            }
            if (xMaxConstraint != null)
            {
                xMax = xMaxConstraint;
            }

            Fdm1dMesher helper;

            if (cPoint != null &&
                cPoint.first != null &&
                Math.Log(cPoint.first.Value) >= xMin && Math.Log(cPoint.first.Value) <= xMax)
            {
                helper = new Concentrating1dMesher(xMin.Value, xMax.Value, size,
                                                   new Pair <double?, double?>(Math.Log(cPoint.first.Value), cPoint.second));
            }
            else
            {
                helper = new Uniform1dMesher(xMin.Value, xMax.Value, size);
            }

            locations_ = helper.locations();
            for (int i = 0; i < locations_.Count; ++i)
            {
                dplus_[i]  = helper.dplus(i);
                dminus_[i] = helper.dminus(i);
            }
        }
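
Both Black-Scholes meshers above size the log-space grid the same way: N^{-1}(1 - eps) standard deviations (times a safety scaleFactor) around the forward, so that the terminal lognormal mass left outside the grid is on the order of eps. A stripped-down sketch with illustrative data, assuming QLNet's InverseCumulativeNormal, a flat volatility, and no dividends or drift adjustment:

using System;
using QLNet;

class GridBoundsSketch
{
    static void Main()
    {
        double spot = 100.0, vol = 0.3, maturity = 2.0;
        double eps = 0.0001, scaleFactor = 1.5;

        double normInvEps = new InverseCumulativeNormal().value(1 - eps);   // ≈ 3.719
        double sigmaSqrtT = vol * Math.Sqrt(maturity);

        double xMin = Math.Log(spot) - sigmaSqrtT * normInvEps * scaleFactor;
        double xMax = Math.Log(spot) + sigmaSqrtT * normInvEps * scaleFactor;

        Console.WriteLine($"S-grid: [{Math.Exp(xMin):F2}, {Math.Exp(xMax):F2}]");
    }
}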