Example #1
            private double cornishFisherEps(HestonProcess process, double nu_0, double nu_t, double dt, double eps)
            {
                // use the moment generating function to get the
                // first, second, third and fourth moments of the distribution
                double d   = 1e-2;
                double p2  = Phi(process, new Complex(0, -2 * d), nu_0, nu_t, dt).Real;
                double p1  = Phi(process, new Complex(0, -d), nu_0, nu_t, dt).Real;
                double p0  = Phi(process, new Complex(0, 0), nu_0, nu_t, dt).Real;
                double pm1 = Phi(process, new Complex(0, d), nu_0, nu_t, dt).Real;
                double pm2 = Phi(process, new Complex(0, 2 * d), nu_0, nu_t, dt).Real;

                double avg    = (pm2 - 8 * pm1 + 8 * p1 - p2) / (12 * d);
                double m2     = (-pm2 + 16 * pm1 - 30 * p0 + 16 * p1 - p2) / (12 * d * d);
                double var    = m2 - avg * avg;
                double stdDev = Math.Sqrt(var);

                double m3   = (-0.5 * pm2 + pm1 - p1 + 0.5 * p2) / (d * d * d);
                double skew = (m3 - 3 * var * avg - avg * avg * avg) / (var * stdDev);

                double m4   = (pm2 - 4 * pm1 + 6 * p0 - 4 * p1 + p2) / (d * d * d * d);
                double kurt = (m4 - 4 * m3 * avg + 6 * m2 * avg * avg - 3 * avg * avg * avg * avg) / (var * var);

                // use the Cornish-Fisher relation to come up with an improved
                // estimate of u_\eps such that 1-F(u_\eps) < \eps
                double q = new InverseCumulativeNormal().value(1 - eps);
                double w = q + (q * q - 1) / 6 * skew + (q * q * q - 3 * q) / 24 * (kurt - 3)
                           - (2 * q * q * q - 5 * q) / 36 * skew * skew;

                return avg + w * stdDev;
            }
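
The Cornish-Fisher step above adjusts the Gaussian quantile q = N^{-1}(1 - eps) for the skewness s and kurtosis k extracted from the moment generating function:

\[
w = q + \frac{q^2 - 1}{6}\,s + \frac{q^3 - 3q}{24}\,(k - 3) - \frac{2q^3 - 5q}{36}\,s^2
\]

so the returned value avg + w * stdDev estimates the quantile u_eps at which 1 - F(u_eps) < eps for the skewed, fat-tailed distribution.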
Example #2
        /*! gaussian-assumption y-th percentile
         */
        /*! \pre percentile must be in range (0%-100%) extremes excluded */
        public double gaussianPercentile(double percentile)
        {
            Utils.QL_REQUIRE(percentile > 0.0 && percentile < 1.0, () => "percentile (" + percentile + ") must be in (0.0, 1.0)");

            InverseCumulativeNormal gInverse = new InverseCumulativeNormal(mean(), standardDeviation());

            return gInverse.value(percentile);
        }
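
As a minimal usage sketch (the figures are hypothetical, not taken from any example here): InverseCumulativeNormal(m, s) is the quantile function of N(m, s^2), so the y-th percentile is m + s * N^{-1}(y).

        // hypothetical figures: mean 2%, standard deviation 10%
        InverseCumulativeNormal gInverse = new InverseCumulativeNormal(0.02, 0.1);
        double p95 = gInverse.value(0.95);   // 0.02 + 0.1 * 1.6449 ~= 0.1845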
Example #3
        /*! gaussian-assumption y-th percentile, defined as the value x
         *  such that \f[ y = \frac{1}{\sqrt{2 \pi}}
         *                            \int_{-\infty}^{x} \exp (-u^2/2) du \f]
         */
        /*! \pre percentile must be in range (0%-100%) extremes excluded */
        public double gaussianPercentile(double percentile)
        {
            if (!(percentile > 0.0 && percentile < 1.0))
            {
                throw new ApplicationException("percentile (" + percentile + ") must be in (0.0, 1.0)");
            }

            InverseCumulativeNormal gInverse = new InverseCumulativeNormal(mean(), standardDeviation());

            return gInverse.value(percentile);
        }
Example #4
        public FdmBlackScholesMesher(int size,
                                     GeneralizedBlackScholesProcess process,
                                     double maturity, double strike,
                                     double? xMinConstraint = null,
                                     double? xMaxConstraint = null,
                                     double eps = 0.0001,
                                     double scaleFactor = 1.5,
                                     Pair<double?, double?> cPoint = null)
            : base(size)
        {
            double S = process.x0();

            Utils.QL_REQUIRE(S > 0.0, () => "negative or null underlying given");

            // Set the grid boundaries
            double normInvEps = new InverseCumulativeNormal().value(1 - eps);
            double sigmaSqrtT
                = process.blackVolatility().currentLink().blackVol(maturity, strike)
                  * Math.Sqrt(maturity);

            double? xMin = Math.Log(S) - sigmaSqrtT * normInvEps * scaleFactor;
            double? xMax = Math.Log(S) + sigmaSqrtT * normInvEps * scaleFactor;

            if (xMinConstraint != null)
            {
                xMin = xMinConstraint;
            }
            if (xMaxConstraint != null)
            {
                xMax = xMaxConstraint;
            }

            Fdm1dMesher helper;

            if (cPoint != null && cPoint.first != null &&
                Math.Log(cPoint.first.Value) >= xMin && Math.Log(cPoint.first.Value) <= xMax)
            {
                helper = new Concentrating1dMesher(xMin.Value, xMax.Value, size,
                                                   new Pair<double?, double?>(Math.Log(cPoint.first.Value), cPoint.second));
            }
            else
            {
                helper = new Uniform1dMesher(xMin.Value, xMax.Value, size);
            }

            locations_ = helper.locations();
            for (int i = 0; i < locations_.Count; ++i)
            {
                dplus_[i]  = helper.dplus(i);
                dminus_[i] = helper.dminus(i);
            }
        }
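
The boundary choice follows from the lognormal terminal distribution: with sigma the Black volatility read at (maturity, strike) and c the scaleFactor,

\[
x_{\min/\max} = \ln S \mp \sigma\sqrt{T}\,N^{-1}(1-\varepsilon)\,c ,
\]

so that, before scaling, the log-spot at maturity falls inside [x_min, x_max] with probability of roughly 1 - 2*eps.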
Example #5
        //! gaussian-assumption Expected Shortfall at a given percentile

        /*! Assuming a gaussian distribution, it
         *  returns the expected loss in case the loss exceeds
         *  a VaR threshold, that is, the average of the
         *  observations below the given percentile \f$ p \f$.
         *  Also known as conditional value-at-risk.
         *
         *  See Artzner, Delbaen, Eber and Heath,
         *  "Coherent measures of risk", Mathematical Finance 9 (1999)
         */
        public double gaussianExpectedShortfall(double percentile)
        {
            Utils.QL_REQUIRE(percentile < 1.0 && percentile >= 0.9, () => "percentile (" + percentile + ") out of range [0.9, 1)");

            double m   = this.mean();
            double std = this.standardDeviation();
            InverseCumulativeNormal gInverse = new InverseCumulativeNormal(m, std);
            double var    = gInverse.value(1.0 - percentile);
            NormalDistribution g = new NormalDistribution(m, std);
            double result = m - std * std * g.value(var) / (1.0 - percentile);

            // expectedShortfall must be a loss
            // this means that it has to be MIN(result, 0.0)
            // expectedShortfall must also be a positive quantity, so -MIN(*)
            return -Math.Min(result, 0.0);
        }
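
The closed form used for result is the conditional mean of a truncated Gaussian: for X ~ N(m, sigma^2) with density f and cdf F, E[X | X <= a] = m - sigma^2 f(a) / F(a). Choosing a = F^{-1}(1 - p) makes the denominator 1 - p, hence

\[
\mathrm{ES}_p = -\min\!\left(m - \frac{\sigma^2\,f\big(F^{-1}(1-p)\big)}{1-p},\;0\right),
\]

with the sign flipped at the end so the shortfall is reported as a positive loss.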
Example #6
        // alternative delta type
        private double strikeFromDelta(double delta, DeltaVolQuote.DeltaType dt)
        {
            double res = 0.0;
            double arg = 0.0;
            InverseCumulativeNormal f = new InverseCumulativeNormal();

            Utils.QL_REQUIRE(delta * phi_ >= 0.0, () => "Option type and delta are incoherent.");

            switch (dt)
            {
            case DeltaVolQuote.DeltaType.Spot:
                Utils.QL_REQUIRE(Math.Abs(delta) <= fDiscount_, () => "Spot delta out of range.");
                arg = -phi_ * f.value(phi_ * delta / fDiscount_) * stdDev_ + 0.5 * stdDev_ * stdDev_;

                res = forward_ * Math.Exp(arg);
                break;

            case DeltaVolQuote.DeltaType.Fwd:
                Utils.QL_REQUIRE(Math.Abs(delta) <= 1.0, () => "Forward delta out of range.");
                arg = -phi_ * f.value(phi_ * delta) * stdDev_ + 0.5 * stdDev_ * stdDev_;

                res = forward_ * Math.Exp(arg);
                break;

            case DeltaVolQuote.DeltaType.PaSpot:
            case DeltaVolQuote.DeltaType.PaFwd:
                // This has to be solved numerically. One of the
                // problems is that the premium-adjusted call delta is
                // not monotonic in strike, so two solutions may
                // occur. The one to the right of the delta's maximum
                // is considered the correct strike. Proper interval
                // bounds for the strike need to be chosen, otherwise
                // the numerics can be very unreliable and unstable.
                // Brent was chosen over Newton since the interval can
                // be specified explicitly and we cannot run into the
                // region to the left of the maximum. The put delta
                // doesn't have this property and can be solved
                // without any problems, but also numerically.

                BlackDeltaPremiumAdjustedSolverClass f1 = new BlackDeltaPremiumAdjustedSolverClass(
                    ot_, dt, spot_, dDiscount_, fDiscount_, stdDev_, delta);

                Brent solver = new Brent();
                solver.setMaxEvaluations(1000);
                double accuracy = 1.0e-10;

                double rightLimit = 0.0;
                double leftLimit  = 0.0;

                // The non-premium-adjusted strike always lies to the right of the premium-adjusted one
                if (dt == DeltaVolQuote.DeltaType.PaSpot)
                {
                    rightLimit = strikeFromDelta(delta, DeltaVolQuote.DeltaType.Spot);
                }
                else
                {
                    rightLimit = strikeFromDelta(delta, DeltaVolQuote.DeltaType.Fwd);
                }

                if (phi_ < 0)
                {
                    // if put
                    res = solver.solve(f1, accuracy, rightLimit, 0.0, spot_ * 100.0);
                    break;
                }
                else
                {
                    // find the left limit, i.e. the strike at which
                    // the premium-adjusted delta attains its maximum.

                    BlackDeltaPremiumAdjustedMaxStrikeClass g = new BlackDeltaPremiumAdjustedMaxStrikeClass(
                        ot_, dt, spot_, dDiscount_, fDiscount_, stdDev_);

                    leftLimit = solver.solve(g, accuracy, rightLimit * 0.5, 0.0, rightLimit);

                    double guess = leftLimit + (rightLimit - leftLimit) * 0.5;

                    res = solver.solve(f1, accuracy, guess, leftLimit, rightLimit);
                } // end if phi<0 else

                break;


            default:
                Utils.QL_FAIL("invalid delta type");
                break;
            }

            return res;
        }
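
The two closed-form branches invert the Black-Scholes delta directly. With phi = +1 for calls and -1 for puts, stdDev_ = sigma*sqrt(T) and d_f the discount factor fDiscount_, solving Delta = phi d_f N(phi d_1) (spot) and Delta = phi N(phi d_1) (forward) for the strike gives

\[
K_{\mathrm{spot}} = F\,e^{-\phi\,N^{-1}(\phi\Delta/d_f)\,\sigma\sqrt{T} + \frac{1}{2}\sigma^2 T},
\qquad
K_{\mathrm{fwd}} = F\,e^{-\phi\,N^{-1}(\phi\Delta)\,\sigma\sqrt{T} + \frac{1}{2}\sigma^2 T},
\]

which is exactly the arg computed in the Spot and Fwd cases above.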
Example #7
        public FdmBlackScholesMesher(int size,
                                     GeneralizedBlackScholesProcess process,
                                     double maturity, double strike,
                                     double? xMinConstraint = null,
                                     double? xMaxConstraint = null,
                                     double eps = 0.0001,
                                     double scaleFactor = 1.5,
                                     Pair<double?, double?> cPoint = null,
                                     DividendSchedule dividendSchedule = null,
                                     FdmQuantoHelper fdmQuantoHelper = null,
                                     double spotAdjustment = 0.0)
            : base(size)
        {
            double S = process.x0();

            Utils.QL_REQUIRE(S > 0.0, () => "negative or null underlying given");

            dividendSchedule = dividendSchedule ?? new DividendSchedule();
            List<pair_double> intermediateSteps = new List<pair_double>();

            for (int i = 0; i < dividendSchedule.Count &&
                 process.time(dividendSchedule[i].date()) <= maturity; ++i)
            {
                intermediateSteps.Add(
                    new pair_double(
                        process.time(dividendSchedule[i].date()),
                        dividendSchedule[i].amount()
                        ));
            }

            int intermediateTimeSteps = (int)Math.Max(2, 24.0 * maturity);

            for (int i = 0; i < intermediateTimeSteps; ++i)
            {
                intermediateSteps.Add(
                    new pair_double((i + 1) * (maturity / intermediateTimeSteps), 0.0));
            }

            intermediateSteps.Sort();

            Handle<YieldTermStructure> rTS = process.riskFreeRate();
            Handle<YieldTermStructure> qTS = fdmQuantoHelper != null
                ? new Handle<YieldTermStructure>(
                      new QuantoTermStructure(process.dividendYield(),
                                              process.riskFreeRate(),
                                              new Handle<YieldTermStructure>(fdmQuantoHelper.foreignTermStructure()),
                                              process.blackVolatility(),
                                              strike,
                                              new Handle<BlackVolTermStructure>(fdmQuantoHelper.fxVolatilityTermStructure()),
                                              fdmQuantoHelper.exchRateATMlevel(),
                                              fdmQuantoHelper.equityFxCorrelation()))
                : process.dividendYield();

            double lastDivTime = 0.0;
            double fwd = S + spotAdjustment;
            double mi = fwd, ma = fwd;

            for (int i = 0; i < intermediateSteps.Count; ++i)
            {
                double divTime   = intermediateSteps[i].first;
                double divAmount = intermediateSteps[i].second;

                fwd = fwd / rTS.currentLink().discount(divTime) * rTS.currentLink().discount(lastDivTime)
                      * qTS.currentLink().discount(divTime) / qTS.currentLink().discount(lastDivTime);

                mi = Math.Min(mi, fwd); ma = Math.Max(ma, fwd);

                fwd -= divAmount;

                mi = Math.Min(mi, fwd); ma = Math.Max(ma, fwd);

                lastDivTime = divTime;
            }

            // Set the grid boundaries
            double normInvEps = new InverseCumulativeNormal().value(1 - eps);
            double sigmaSqrtT
                = process.blackVolatility().currentLink().blackVol(maturity, strike)
                  * Math.Sqrt(maturity);

            double? xMin = Math.Log(mi) - sigmaSqrtT * normInvEps * scaleFactor;
            double? xMax = Math.Log(ma) + sigmaSqrtT * normInvEps * scaleFactor;

            if (xMinConstraint != null)
            {
                xMin = xMinConstraint;
            }
            if (xMaxConstraint != null)
            {
                xMax = xMaxConstraint;
            }

            Fdm1dMesher helper;

            if (cPoint != null &&
                cPoint.first != null &&
                Math.Log(cPoint.first.Value) >= xMin && Math.Log(cPoint.first.Value) <= xMax)
            {
                helper = new Concentrating1dMesher(xMin.Value, xMax.Value, size,
                                                   new Pair<double?, double?>(Math.Log(cPoint.first.Value), cPoint.second));
            }
            else
            {
                helper = new Uniform1dMesher(xMin.Value, xMax.Value, size);
            }

            locations_ = helper.locations();
            for (int i = 0; i < locations_.Count; ++i)
            {
                dplus_[i]  = helper.dplus(i);
                dminus_[i] = helper.dminus(i);
            }
        }
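
Relative to Example #4, this overload first tracks the forward through the dividend schedule: between event times the forward accrues with the domestic discount curve against the (possibly quanto-adjusted) dividend curve, and at each dividend date the cash amount is subtracted,

\[
F_{t_i}^{-} = F_{t_{i-1}}\,\frac{P_r(t_{i-1})}{P_r(t_i)}\,\frac{P_q(t_i)}{P_q(t_{i-1})},
\qquad
F_{t_i} = F_{t_i}^{-} - D_i ,
\]

with P_r and P_q the discount factors of rTS and qTS. The running extremes mi and ma of that path then replace the single spot S in the grid-boundary formula, so the mesh covers the whole forward path rather than just the initial level.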