/// <summary>
/// Calculate the Option Adjusted Spread (OAS)
/// <remarks>
/// Calculates the spread that needs to be added to the
/// reference curve so that the theoretical model value
/// matches the market price.
/// </remarks>
/// </summary>
/// <param name="cleanPrice">market clean price to match</param>
/// <param name="engineTS">reference yield term structure</param>
/// <param name="dayCounter">day counter for the spread quotation</param>
/// <param name="compounding">compounding convention of the spread</param>
/// <param name="frequency">compounding frequency of the spread</param>
/// <param name="settlement">settlement date; defaults to the bond's settlement date</param>
/// <param name="accuracy">root-finding accuracy</param>
/// <param name="maxIterations">maximum number of solver evaluations</param>
/// <param name="guess">initial guess for the spread</param>
/// <returns>the option adjusted spread</returns>
public double OAS(double cleanPrice,
                  Handle<YieldTermStructure> engineTS,
                  DayCounter dayCounter,
                  Compounding compounding,
                  Frequency frequency,
                  Date settlement = null,
                  double accuracy = 1.0e-10,
                  int maxIterations = 100,
                  double guess = 0.0)
{
   if (settlement == null)
      settlement = settlementDate();

   double dirtyPrice = cleanPrice + accruedAmount(settlement);

   var f = new NpvSpreadHelper(this);
   OasHelper obj = new OasHelper(f, dirtyPrice);

   Brent solver = new Brent();
   solver.setMaxEvaluations(maxIterations);

   double step = 0.001;
   double oas = solver.solve(obj, accuracy, guess, step);

   return continuousToConv(oas, this, engineTS, dayCounter, compounding, frequency);
}
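// Illustrative sketch (not library code): the OAS search above is a plain
// one-dimensional root find. Assuming the QLNet-style ISolver1d base class that
// Brent.solve accepts, a toy objective with a single cash flow at t = 5 on a flat
// 3% curve looks like this; ToySpreadObjective and all numbers are made up.
public class ToySpreadObjective : ISolver1d
{
   private readonly double targetDirtyPrice_;
   public ToySpreadObjective(double targetDirtyPrice) { targetDirtyPrice_ = targetDirtyPrice; }
   public override double value(double s)
   {
      // model price of a single 100 cash flow at t = 5, discounted at 3% plus spread s
      double modelPrice = 100.0 * Math.Exp(-(0.03 + s) * 5.0);
      return modelPrice - targetDirtyPrice_;
   }
}
// usage, mirroring the call pattern above (guess plus initial bracketing step):
// Brent solver = new Brent();
// solver.setMaxEvaluations(100);
// double oas = solver.solve(new ToySpreadObjective(85.0), 1.0e-10, 0.0, 0.001);  // ≈ 0.0025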
public override Lattice tree(TimeGrid grid)
{
   TermStructureFittingParameter phi = new TermStructureFittingParameter(termStructure());
   ShortRateDynamics numericDynamics = new Dynamics(phi, a(), sigma());
   TrinomialTree trinomial = new TrinomialTree(numericDynamics.process(), grid);
   ShortRateTree numericTree = new ShortRateTree(trinomial, numericDynamics, grid);

   TermStructureFittingParameter.NumericalImpl impl =
      (TermStructureFittingParameter.NumericalImpl)phi.implementation();
   impl.reset();

   double value = 1.0;
   double vMin = -50.0;
   double vMax = 50.0;
   for (int i = 0; i < (grid.size() - 1); i++)
   {
      double discountBond = termStructure().link.discount(grid[i + 1]);
      double xMin = trinomial.underlying(i, 0);
      double dx = trinomial.dx(i);
      Helper finder = new Helper(i, xMin, dx, discountBond, numericTree);
      Brent s1d = new Brent();
      s1d.setMaxEvaluations(1000);
      value = s1d.solve(finder, 1e-7, value, vMin, vMax);
      impl.setvalue(grid[i], value);
   }
   return numericTree;
}
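// Sketch of what the per-step fitting above amounts to (ToyThetaFinder is
// hypothetical, not the library's Helper): given Arrow-Debreu state prices q[j]
// at node values x[j], the shift theta is chosen so that the tree reprices the
// curve's discount bond over the next step of length dt.
public class ToyThetaFinder : ISolver1d
{
   private readonly double[] q_, x_;
   private readonly double dt_, discountBond_;
   public ToyThetaFinder(double[] q, double[] x, double dt, double discountBond)
   { q_ = q; x_ = x; dt_ = dt; discountBond_ = discountBond; }
   public override double value(double theta)
   {
      // tree value of the discount bond minus the curve value
      double sum = 0.0;
      for (int j = 0; j < q_.Length; j++)
         sum += q_[j] * Math.Exp(-(x_[j] + theta) * dt_);
      return sum - discountBond_;
   }
}
// e.g. three nodes, a one-year step, and a curve discount factor of 0.97:
// double[] q = { 0.25, 0.50, 0.25 };
// double[] x = { -0.01, 0.0, 0.01 };
// double theta = new Brent().solve(new ToyThetaFinder(q, x, 1.0, 0.97), 1e-7, 0.03, -50.0, 50.0);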
private double strikeFromPrice(double price, Option.Type optionType, double referenceStrike)
{
   double a, b, min, max, k;
   if (optionType == Option.Type.Call)
   {
      a = swapRateValue_;
      min = referenceStrike;
      b = max = k = Math.Min(smileSection_.maxStrike(), shiftedUpperBound_);
   }
   else
   {
      a = min = k = Math.Max(smileSection_.minStrike(), shiftedLowerBound_);
      b = swapRateValue_;
      max = referenceStrike;
   }

   PriceHelper h = new PriceHelper(smileSection_, optionType, price);
   Brent solver = new Brent();
   try
   {
      k = solver.solve(h, 1.0E-5, swapRateValue_, a, b);
   }
   catch (Exception)
   {
      // use default value set above
   }
   return Math.Min(Math.Max(k, min), max);
}
//! Black volatility implied by the model
public double impliedVolatility(double targetValue,
                                double accuracy,
                                int maxEvaluations,
                                double minVol,
                                double maxVol)
{
   ImpliedVolatilityHelper f = new ImpliedVolatilityHelper(this, targetValue);
   Brent solver = new Brent();
   solver.setMaxEvaluations(maxEvaluations);
   return solver.solve(f, accuracy, volatility_.link.value(), minVol, maxVol);
}
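// Sketch of the bracketed implied-volatility pattern used above and in several
// snippets below: the Black-76 call value is monotone in volatility, so Brent on
// [minVol, maxVol] is safe. CumulativeNormalDistribution is used as elsewhere in
// this codebase; ToyImpliedVolObjective itself is hypothetical.
public class ToyImpliedVolObjective : ISolver1d
{
   private readonly double fwd_, strike_, t_, df_, target_;
   public ToyImpliedVolObjective(double fwd, double strike, double t, double df, double target)
   { fwd_ = fwd; strike_ = strike; t_ = t; df_ = df; target_ = target; }
   public override double value(double vol)
   {
      double stdDev = vol * Math.Sqrt(t_);
      double d1 = Math.Log(fwd_ / strike_) / stdDev + 0.5 * stdDev;
      CumulativeNormalDistribution phi = new CumulativeNormalDistribution();
      double call = df_ * (fwd_ * phi.value(d1) - strike_ * phi.value(d1 - stdDev));
      return call - target_;
   }
}
// Brent solver = new Brent();
// solver.setMaxEvaluations(100);
// // a one-year ATM-forward call worth 4.0 implies a vol of roughly 10%:
// double vol = solver.solve(new ToyImpliedVolObjective(100.0, 100.0, 1.0, 0.97, 4.0),
//                           1e-8, 0.2, 1e-4, 4.0);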
public double MonthlyYield()
{
   Brent solver = new Brent();
   solver.setMaxEvaluations(100);
   List<CashFlow> cf = expectedCashflows();

   MonthlyYieldFinder objective = new MonthlyYieldFinder(notional(settlementDate()), cf, settlementDate());
   return solver.solve(objective, 1.0e-10, 0.02, 0.0, 1.0) / 100;
}
private List<double> spreadsVolImplied()
{
   Brent solver = new Brent();
   List<double> result = new InitializedList<double>(nOptionExpiries_, 0.0);
   double guess = 0.0001, minSpread = -0.1, maxSpread = 0.1;
   for (int j = 0; j < nOptionExpiries_; ++j)
   {
      ObjectiveFunction f = new ObjectiveFunction(stripper1_, caps_[j], atmCapFloorPrices_[j]);
      solver.setMaxEvaluations(maxEvaluations_);
      double root = solver.solve(f, accuracy_, guess, minSpread, maxSpread);
      result[j] = root;
   }
   return result;
}
/*! Extrapolation for unknown order of convergence
    \param t first scaling factor for the step size
    \param s second scaling factor for the step size
*/
public double value(double t, double s)
{
   Utils.QL_REQUIRE(t > 1 && s > 1, () => "scaling factors must be greater than 1");
   Utils.QL_REQUIRE(t > s, () => "t must be greater than s");

   double ft = f_(delta_h_ / t);
   double fs = f_(delta_h_ / s);

   // solve for the convergence order k, then extrapolate to step size zero
   double k = new Brent().solve(new RichardsonEqn(fdelta_h_, ft, fs, t, s), 1e-8, 0.05, 10);
   double ts = Math.Pow(s, k);

   return (ts * fs - fdelta_h_) / (ts - 1.0);
}
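// Sketch of the idea behind the unknown-order step above: assume f(h) ≈ F + c·h^k,
// evaluate at h, h/t and h/s, and solve for the k that makes the two implied limits
// agree -- this is the root equation RichardsonEqn hands to Brent. ToyRichardsonEqn
// is hypothetical; a bounded bracket is used here to stay clear of the pole at k = 0.
public class ToyRichardsonEqn : ISolver1d
{
   private readonly double fh_, ft_, fs_, t_, s_;
   public ToyRichardsonEqn(double fh, double ft, double fs, double t, double s)
   { fh_ = fh; ft_ = ft; fs_ = fs; t_ = t; s_ = s; }
   public override double value(double k)
   {
      // both expressions estimate the limit F; their difference vanishes at the true k
      double tk = Math.Pow(t_, k), sk = Math.Pow(s_, k);
      return (tk * ft_ - fh_) / (tk - 1.0) - (sk * fs_ - fh_) / (sk - 1.0);
   }
}
// toy problem: f(h) = (exp(h) - 1)/h, whose h -> 0 limit is 1
// double h = 0.1, t = 4.0, s = 2.0;
// Func<double, double> f = v => (Math.Exp(v) - 1.0) / v;
// double k = new Brent().solve(new ToyRichardsonEqn(f(h), f(h / t), f(h / s), t, s),
//                              1e-8, 1.5, 0.5, 4.0);   // k ≈ 1.04
// double sk = Math.Pow(s, k);
// double F = (sk * f(h / s) - f(h)) / (sk - 1.0);      // ≈ 1.0004, vs f(h) ≈ 1.0517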
public static double calculate(Instrument instrument,
                               IPricingEngine engine,
                               SimpleQuote volQuote,
                               double targetValue,
                               double accuracy,
                               int maxEvaluations,
                               double minVol,
                               double maxVol)
{
   instrument.setupArguments(engine.getArguments());
   engine.getArguments().validate();

   PriceError f = new PriceError(engine, volQuote, targetValue);
   Brent solver = new Brent();
   solver.setMaxEvaluations(maxEvaluations);
   double guess = (minVol + maxVol) / 2.0;
   double result = solver.solve(f, accuracy, guess, minVol, maxVol);
   return result;
}
protected override double blackVolImpl(double t, double strike)
{
   HestonProcess process = hestonModel_.link.process();

   double df = process.riskFreeRate().link.discount(t, true);
   double div = process.dividendYield().link.discount(t, true);
   double spotPrice = process.s0().link.value();

   double fwd = spotPrice
                * process.dividendYield().link.discount(t, true)
                / process.riskFreeRate().link.discount(t, true);

   var payoff = new PlainVanillaPayoff(fwd > strike ? Option.Type.Put : Option.Type.Call, strike);

   double kappa = hestonModel_.link.kappa();
   double theta = hestonModel_.link.theta();
   double rho = hestonModel_.link.rho();
   double sigma = hestonModel_.link.sigma();
   double v0 = hestonModel_.link.v0();

   AnalyticHestonEngine.ComplexLogFormula cpxLogFormula = AnalyticHestonEngine.ComplexLogFormula.Gatheral;

   AnalyticHestonEngine hestonEnginePtr = null;
   double? npv = null;
   int evaluations = 0;

   AnalyticHestonEngine.doCalculation(
      df, div, spotPrice, strike, t,
      kappa, theta, sigma, v0, rho,
      payoff, integration_, cpxLogFormula,
      hestonEnginePtr, ref npv, ref evaluations);

   if (npv <= 0.0)
      return Math.Sqrt(theta);

   Brent solver = new Brent();
   solver.setMaxEvaluations(10000);
   double guess = Math.Sqrt(theta);
   double accuracy = Const.QL_EPSILON;

   var f = new ImpliedVolHelper(payoff.optionType(), strike, fwd, t, df, npv.Value);
   return solver.solve(f, accuracy, guess, 0.01);
}
/// <summary>
/// Returns the Black implied forward yield volatility
/// <remarks>
/// the forward yield volatility (see Hull, Fourth Edition,
/// Chapter 20, pg 536). Relevant only to European put/call
/// schedules
/// </remarks>
/// </summary>
/// <param name="targetValue">market price to match</param>
/// <param name="discountCurve">curve used to discount the cash flows</param>
/// <param name="accuracy">root-finding accuracy</param>
/// <param name="maxEvaluations">maximum number of solver evaluations</param>
/// <param name="minVol">lower bound for the volatility</param>
/// <param name="maxVol">upper bound for the volatility</param>
/// <returns>the implied forward yield volatility</returns>
public double impliedVolatility(double targetValue,
                                Handle<YieldTermStructure> discountCurve,
                                double accuracy,
                                int maxEvaluations,
                                double minVol,
                                double maxVol)
{
   calculate();
   Utils.QL_REQUIRE(!isExpired(), () => "instrument expired");

   double guess = 0.5 * (minVol + maxVol);
   blackDiscountCurve_.linkTo(discountCurve, false);

   ImpliedVolHelper f = new ImpliedVolHelper(this, targetValue);
   Brent solver = new Brent();
   solver.setMaxEvaluations(maxEvaluations);
   return solver.solve(f, accuracy, guess, minVol, maxVol);
}
public double value(double x)
{
   // first find the right side of the interval by doubling the upper bound
   double upper = guess_;
   int evaluations = maxEvaluations_;
   while (nonCentralDist_.value(upper) < x && evaluations > 0)
   {
      upper *= 2.0;
      --evaluations;
   }

   // use a Brent solver for the rest
   Brent solver = new Brent();
   solver.setMaxEvaluations(evaluations);
   return solver.solve(new IncChiQuareFinder(x, nonCentralDist_.value),
                       accuracy_, 0.75 * upper,
                       (evaluations == maxEvaluations_) ? 0.0 : 0.5 * upper,
                       upper);
}
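// Sketch of the bracket-doubling idiom above, on a distribution whose inverse is
// known so the result can be checked: grow the upper bound until the CDF passes
// the target probability, then let Brent finish. ToyCdfFinder is hypothetical;
// CumulativeNormalDistribution and InverseCumulativeNormal appear elsewhere here.
public class ToyCdfFinder : ISolver1d
{
   private readonly double target_;
   private readonly CumulativeNormalDistribution cdf_ = new CumulativeNormalDistribution();
   public ToyCdfFinder(double target) { target_ = target; }
   public override double value(double x) { return cdf_.value(x) - target_; }
}
// double p = 0.975, upper = 1.0;
// CumulativeNormalDistribution n = new CumulativeNormalDistribution();
// while (n.value(upper) < p)
//    upper *= 2.0;                       // stops at upper = 2.0
// Brent solver = new Brent();
// double q = solver.solve(new ToyCdfFinder(p), 1e-10, 0.75 * upper, 0.0, upper);
// // q ≈ 1.95996, matching new InverseCumulativeNormal().value(p)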
public double value(double x)
{
   CumulativeNormalDistribution phi = new CumulativeNormalDistribution();
   double temp = (x - mux_) / sigmax_;
   double txy = Math.Sqrt(1.0 - rhoxy_ * rhoxy_);

   Vector lambda = new Vector(size_);
   int i;
   for (i = 0; i < size_; i++)
   {
      double tau = (i == 0 ? t_[0] - T_ : t_[i] - t_[i - 1]);
      double c = (i == size_ - 1 ? (1.0 + rate_ * tau) : rate_ * tau);
      lambda[i] = c * A_[i] * Math.Exp(-Ba_[i] * x);
   }

   SolvingFunction function = new SolvingFunction(lambda, Bb_);
   Brent s1d = new Brent();
   s1d.setMaxEvaluations(1000);
   double yb = s1d.solve(function, 1e-6, 0.00, -100.0, 100.0);

   double h1 = (yb - muy_) / (sigmay_ * txy) - rhoxy_ * (x - mux_) / (sigmax_ * txy);
   double value = phi.value(-w_ * h1);

   for (i = 0; i < size_; i++)
   {
      double h2 = h1 + Bb_[i] * sigmay_ * Math.Sqrt(1.0 - rhoxy_ * rhoxy_);
      double kappa = -Bb_[i] * (muy_ - 0.5 * txy * txy * sigmay_ * sigmay_ * Bb_[i]
                                + rhoxy_ * sigmay_ * (x - mux_) / sigmax_);
      value -= lambda[i] * Math.Exp(kappa) * phi.value(-w_ * h2);
   }

   return Math.Exp(-0.5 * temp * temp) * value / (sigmax_ * Math.Sqrt(2.0 * QLCore.Const.M_PI));
}
//! Tree build-up + numerical fitting to term-structure
public ShortRateTree(TrinomialTree tree,
                     ShortRateDynamics dynamics,
                     TermStructureFittingParameter.NumericalImpl theta,
                     TimeGrid timeGrid)
   : base(timeGrid, tree.size(1))
{
   tree_ = tree;
   dynamics_ = dynamics;
   theta.reset();

   double value = 1.0;
   double vMin = -100.0;
   double vMax = 100.0;
   for (int i = 0; i < (timeGrid.size() - 1); i++)
   {
      double discountBond = theta.termStructure().link.discount(t_[i + 1]);
      Helper finder = new Helper(i, discountBond, theta, this);
      Brent s1d = new Brent();
      s1d.setMaxEvaluations(1000);
      value = s1d.solve(finder, 1e-7, value, vMin, vMax);
      theta.change(value);
   }
}
//! implied Z-spread.
public static double zSpread(Leg leg, double npv, YieldTermStructure discount,
                             DayCounter dayCounter, Compounding compounding, Frequency frequency,
                             bool includeSettlementDateFlows,
                             Date settlementDate = null, Date npvDate = null,
                             double accuracy = 1.0e-10, int maxIterations = 100, double guess = 0.0)
{
   if (settlementDate == null)
      settlementDate = Settings.Instance.evaluationDate();

   if (npvDate == null)
      npvDate = settlementDate;

   Brent solver = new Brent();
   solver.setMaxEvaluations(maxIterations);

   ZSpreadFinder objFunction = new ZSpreadFinder(leg, discount, npv, dayCounter, compounding, frequency,
                                                 includeSettlementDateFlows, settlementDate, npvDate);
   double step = 0.01;
   return solver.solve(objFunction, accuracy, guess, step);
}
// alternative delta type
private double strikeFromDelta(double delta, DeltaVolQuote.DeltaType dt)
{
   double res = 0.0;
   double arg = 0.0;
   InverseCumulativeNormal f = new InverseCumulativeNormal();

   Utils.QL_REQUIRE(delta * phi_ >= 0.0, () => "Option type and delta are incoherent.");

   switch (dt)
   {
      case DeltaVolQuote.DeltaType.Spot:
         Utils.QL_REQUIRE(Math.Abs(delta) <= fDiscount_, () => "Spot delta out of range.");
         arg = -phi_ * f.value(phi_ * delta / fDiscount_) * stdDev_ + 0.5 * stdDev_ * stdDev_;
         res = forward_ * Math.Exp(arg);
         break;

      case DeltaVolQuote.DeltaType.Fwd:
         Utils.QL_REQUIRE(Math.Abs(delta) <= 1.0, () => "Forward delta out of range.");
         arg = -phi_ * f.value(phi_ * delta) * stdDev_ + 0.5 * stdDev_ * stdDev_;
         res = forward_ * Math.Exp(arg);
         break;

      case DeltaVolQuote.DeltaType.PaSpot:
      case DeltaVolQuote.DeltaType.PaFwd:
         // This has to be solved numerically. One of the problems is that the
         // premium-adjusted call delta is not monotonic in strike, so that two
         // solutions might occur. The one to the right of the delta's maximum is
         // considered to be the correct strike. Proper interval bounds for the
         // strike need to be chosen; the numerics can otherwise be very unreliable
         // and unstable. Brent is chosen over Newton since the interval can be
         // specified explicitly and we cannot run into the area to the left of
         // the maximum. The put delta doesn't have this property and can be
         // solved without any problems, but also numerically.
         BlackDeltaPremiumAdjustedSolverClass f1 = new BlackDeltaPremiumAdjustedSolverClass(
            ot_, dt, spot_, dDiscount_, fDiscount_, stdDev_, delta);

         Brent solver = new Brent();
         solver.setMaxEvaluations(1000);
         double accuracy = 1.0e-10;

         double rightLimit = 0.0;
         double leftLimit = 0.0;

         // the non-premium-adjusted strike is always to the right of the premium-adjusted one
         if (dt == DeltaVolQuote.DeltaType.PaSpot)
            rightLimit = strikeFromDelta(delta, DeltaVolQuote.DeltaType.Spot);
         else
            rightLimit = strikeFromDelta(delta, DeltaVolQuote.DeltaType.Fwd);

         if (phi_ < 0)
         {
            // put
            res = solver.solve(f1, accuracy, rightLimit, 0.0, spot_ * 100.0);
            break;
         }
         else
         {
            // find the left limit, i.e. the strike at which the premium-adjusted
            // delta attains its maximum
            BlackDeltaPremiumAdjustedMaxStrikeClass g = new BlackDeltaPremiumAdjustedMaxStrikeClass(
               ot_, dt, spot_, dDiscount_, fDiscount_, stdDev_);
            leftLimit = solver.solve(g, accuracy, rightLimit * 0.5, 0.0, rightLimit);

            double guess = leftLimit + (rightLimit - leftLimit) * 0.5;
            res = solver.solve(f1, accuracy, guess, leftLimit, rightLimit);
         } // end if phi<0 else

         break;

      default:
         Utils.QL_FAIL("invalid delta type");
         break;
   }
   return res;
}
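// Sketch of the two-stage search described in the comment above, on a toy
// non-monotonic function: g(x) = x·exp(-x) rises to its maximum at x = 1 and then
// falls, so g(x) = c has two roots. Locating the maximum first (as a root of g')
// and then solving to its right pins down the correct branch, mirroring the
// premium-adjusted call delta case. Both helper classes are hypothetical.
public class ToyHumpObjective : ISolver1d
{
   private readonly double target_;
   public ToyHumpObjective(double target) { target_ = target; }
   public override double value(double x) { return x * Math.Exp(-x) - target_; }
}
public class ToyHumpMaxFinder : ISolver1d
{
   // derivative of x·exp(-x); its root is the location of the maximum
   public override double value(double x) { return (1.0 - x) * Math.Exp(-x); }
}
// Brent solver = new Brent();
// solver.setMaxEvaluations(1000);
// double xMax = solver.solve(new ToyHumpMaxFinder(), 1e-10, 0.5, 0.0, 5.0);       // = 1
// double root = solver.solve(new ToyHumpObjective(0.2), 1e-10, 2.0, xMax, 10.0);  // ≈ 2.5426
// // without the left limit, a search over [0, 10] could land on the other root, ≈ 0.2592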
public Concentrating1dMesher(double start, double end, int size,
                             List<Tuple<double?, double?, bool>> cPoints,
                             double tol = 1e-8)
   : base(size)
{
   Utils.QL_REQUIRE(end > start, () => "end must be larger than start");

   List<double?> points = new List<double?>(), betas = new List<double?>();
   foreach (Tuple<double?, double?, bool> iter in cPoints)
   {
      points.Add(iter.Item1);
      betas.Add((iter.Item2 * (end - start)) * (iter.Item2 * (end - start)));
   }

   // get scaling factor a so that y(1) = end
   double aInit = 0.0;
   for (int i = 0; i < points.Count; ++i)
   {
      double c1 = Utils.Asinh((start - points[i].GetValueOrDefault()) / betas[i].GetValueOrDefault());
      double c2 = Utils.Asinh((end - points[i].GetValueOrDefault()) / betas[i].GetValueOrDefault());
      aInit += (c2 - c1) / points.Count;
   }

   OdeIntegrationFct fct = new OdeIntegrationFct(points, betas, tol);
   double a = new Brent().solve(new OdeSolver(fct, start, 0.0, 1.0, end), tol, aInit, 0.1 * aInit);

   // solve ODE for all grid points
   Vector x = new Vector(size), y = new Vector(size);
   x[0] = 0.0;
   y[0] = start;
   double dx = 1.0 / (size - 1);
   for (int i = 1; i < size; ++i)
   {
      x[i] = i * dx;
      y[i] = fct.solve(a, y[i - 1], x[i - 1], x[i]);
   }

   // eliminate numerical noise and ensure y(1) = end
   double dy = y[y.Count - 1] - end;
   for (int i = 1; i < size; ++i)
      y[i] -= i * dx * dy;

   LinearInterpolation odeSolution = new LinearInterpolation(x, x.Count, y);

   // ensure required points are part of the grid
   List<Pair<double?, double?>> w =
      new InitializedList<Pair<double?, double?>>(1, new Pair<double?, double?>(0.0, 0.0));
   for (int i = 0; i < points.Count; ++i)
   {
      if (cPoints[i].Item3 && points[i] > start && points[i] < end)
      {
         int j = y.distance(y[0], y.BinarySearch(points[i].Value));
         double e = new Brent().solve(new OdeSolver2(odeSolution.value, points[i].Value),
                                      Const.QL_EPSILON, x[j], 0.5 / size);
         w.Add(new Pair<double?, double?>(Math.Min(x[size - 2], x[j]), e));
      }
   }
   w.Add(new Pair<double?, double?>(1.0, 1.0));
   w = w.OrderBy(xx => xx.first).Distinct(new equal_on_first()).ToList();

   // the lists must be pre-sized: a bare List(capacity) would throw on indexed assignment
   List<double> u = new InitializedList<double>(w.Count), z = new InitializedList<double>(w.Count);
   for (int i = 0; i < w.Count; ++i)
   {
      u[i] = w[i].first.GetValueOrDefault();
      z[i] = w[i].second.GetValueOrDefault();
   }
   LinearInterpolation transform = new LinearInterpolation(u, u.Count, z);

   for (int i = 0; i < size; ++i)
      locations_[i] = odeSolution.value(transform.value(i * dx));

   for (int i = 0; i < size - 1; ++i)
      dplus_[i] = dminus_[i + 1] = locations_[i + 1] - locations_[i];

   // the last forward and first backward spacings are undefined
   dplus_[dplus_.Count - 1] = null;
   dminus_[0] = null;
}
public override Vector evolve(double t0, Vector x0, double dt, Vector dw)
{
   Vector retVal = new Vector(2);
   double vol, vol2, mu, nu, dy;

   double sdt = Math.Sqrt(dt);
   double sqrhov = Math.Sqrt(1.0 - rho_ * rho_);

   switch (discretization_)
   {
      // For the definition of PartialTruncation, FullTruncation
      // and Reflection see Lord, R., R. Koekkoek and D. van Dijk (2006),
      // "A Comparison of biased simulation schemes for
      // stochastic volatility models",
      // Working Paper, Tinbergen Institute
      case Discretization.PartialTruncation:
         vol = (x0[1] > 0.0) ? Math.Sqrt(x0[1]) : 0.0;
         vol2 = sigma_ * vol;
         mu = riskFreeRate_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - dividendYield_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - 0.5 * vol * vol;
         nu = kappa_ * (theta_ - x0[1]);

         retVal[0] = x0[0] * Math.Exp(mu * dt + vol * dw[0] * sdt);
         retVal[1] = x0[1] + nu * dt + vol2 * sdt * (rho_ * dw[0] + sqrhov * dw[1]);
         break;

      case Discretization.FullTruncation:
         vol = (x0[1] > 0.0) ? Math.Sqrt(x0[1]) : 0.0;
         vol2 = sigma_ * vol;
         mu = riskFreeRate_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - dividendYield_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - 0.5 * vol * vol;
         nu = kappa_ * (theta_ - vol * vol);

         retVal[0] = x0[0] * Math.Exp(mu * dt + vol * dw[0] * sdt);
         retVal[1] = x0[1] + nu * dt + vol2 * sdt * (rho_ * dw[0] + sqrhov * dw[1]);
         break;

      case Discretization.Reflection:
         vol = Math.Sqrt(Math.Abs(x0[1]));
         vol2 = sigma_ * vol;
         mu = riskFreeRate_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - dividendYield_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - 0.5 * vol * vol;
         nu = kappa_ * (theta_ - vol * vol);

         retVal[0] = x0[0] * Math.Exp(mu * dt + vol * dw[0] * sdt);
         retVal[1] = vol * vol + nu * dt + vol2 * sdt * (rho_ * dw[0] + sqrhov * dw[1]);
         break;

      case Discretization.NonCentralChiSquareVariance:
         // use Alan Lewis trick to decorrelate the equity and the variance
         // process by using y(t) = x(t) - (rho/sigma) * nu(t)
         // and Ito's Lemma. Then use exact sampling for the variance
         // process. For further details please read the Wilmott thread
         // "QuantLib code is very high quality"
         vol = (x0[1] > 0.0) ? Math.Sqrt(x0[1]) : 0.0;
         mu = riskFreeRate_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - dividendYield_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - 0.5 * vol * vol;

         retVal[1] = varianceDistribution(x0[1], dw[1], dt);
         dy = (mu - rho_ / sigma_ * kappa_ * (theta_ - vol * vol)) * dt + vol * sqrhov * dw[0] * sdt;

         retVal[0] = x0[0] * Math.Exp(dy + rho_ / sigma_ * (retVal[1] - x0[1]));
         break;

      case Discretization.QuadraticExponential:
      case Discretization.QuadraticExponentialMartingale:
      {
         // for details of the quadratic exponential discretization scheme
         // see Leif Andersen,
         // Efficient Simulation of the Heston Stochastic Volatility Model
         double ex = Math.Exp(-kappa_ * dt);

         double m = theta_ + (x0[1] - theta_) * ex;
         double s2 = x0[1] * sigma_ * sigma_ * ex / kappa_ * (1 - ex)
                     + theta_ * sigma_ * sigma_ / (2 * kappa_) * (1 - ex) * (1 - ex);
         double psi = s2 / (m * m);

         double g1 = 0.5;
         double g2 = 0.5;
         double k0 = -rho_ * kappa_ * theta_ * dt / sigma_;
         double k1 = g1 * dt * (kappa_ * rho_ / sigma_ - 0.5) - rho_ / sigma_;
         double k2 = g2 * dt * (kappa_ * rho_ / sigma_ - 0.5) + rho_ / sigma_;
         double k3 = g1 * dt * (1 - rho_ * rho_);
         double k4 = g2 * dt * (1 - rho_ * rho_);
         double A = k2 + 0.5 * k4;

         if (psi < 1.5)
         {
            double b2 = 2 / psi - 1 + Math.Sqrt(2 / psi * (2 / psi - 1));
            double b = Math.Sqrt(b2);
            double a = m / (1 + b2);

            if (discretization_ == Discretization.QuadraticExponentialMartingale)
            {
               // martingale correction
               Utils.QL_REQUIRE(A < 1 / (2 * a), () => "illegal value");
               k0 = -A * b2 * a / (1 - 2 * A * a) + 0.5 * Math.Log(1 - 2 * A * a) - (k1 + 0.5 * k3) * x0[1];
            }
            retVal[1] = a * (b + dw[1]) * (b + dw[1]);
         }
         else
         {
            double p = (psi - 1) / (psi + 1);
            double beta = (1 - p) / m;

            double u = new CumulativeNormalDistribution().value(dw[1]);

            if (discretization_ == Discretization.QuadraticExponentialMartingale)
            {
               // martingale correction
               Utils.QL_REQUIRE(A < beta, () => "illegal value");
               k0 = -Math.Log(p + beta * (1 - p) / (beta - A)) - (k1 + 0.5 * k3) * x0[1];
            }
            retVal[1] = ((u <= p) ? 0.0 : Math.Log((1 - p) / (1 - u)) / beta);
         }

         mu = riskFreeRate_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
              - dividendYield_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value();

         retVal[0] = x0[0] * Math.Exp(mu * dt + k0 + k1 * x0[1] + k2 * retVal[1]
                                      + Math.Sqrt(k3 * x0[1] + k4 * retVal[1]) * dw[0]);
      }
      break;

      case Discretization.BroadieKayaExactSchemeLobatto:
      case Discretization.BroadieKayaExactSchemeLaguerre:
      case Discretization.BroadieKayaExactSchemeTrapezoidal:
      {
         double nu_0 = x0[1];
         double nu_t = varianceDistribution(nu_0, dw[1], dt);

         double x = Math.Min(1.0 - Const.QL_EPSILON,
                             Math.Max(0.0, new CumulativeNormalDistribution().value(dw[2])));

         cdf_nu_ds_minus_x f = new cdf_nu_ds_minus_x(x, this, nu_0, nu_t, dt, discretization_);
         double vds = new Brent().solve(f, 1e-5, theta_ * dt, 0.1 * theta_ * dt);

         double vdw = (nu_t - nu_0 - kappa_ * theta_ * dt + kappa_ * vds) / sigma_;

         mu = (riskFreeRate_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()
               - dividendYield_.link.forwardRate(t0, t0 + dt, Compounding.Continuous).value()) * dt
              - 0.5 * vds + rho_ * vdw;

         double sig = Math.Sqrt((1 - rho_ * rho_) * vds);
         double s = x0[0] * Math.Exp(mu + sig * dw[0]);

         retVal[0] = s;
         retVal[1] = nu_t;
      }
      break;

      default:
         Utils.QL_FAIL("unknown discretization schema");
         break;
   }

   return retVal;
}
public override void calculate()
{
   Utils.QL_REQUIRE(arguments_.settlementMethod != Settlement.Method.ParYieldCurve, () =>
                    "cash-settled (ParYieldCurve) swaptions not priced by Jamshidian engine");

   Utils.QL_REQUIRE(arguments_.exercise.type() == Exercise.Type.European, () =>
                    "cannot use the Jamshidian decomposition on exotic swaptions");

   Utils.QL_REQUIRE(arguments_.swap.spread.IsEqual(0.0), () =>
                    "non zero spread (" + arguments_.swap.spread + ") not allowed");

   Date referenceDate;
   DayCounter dayCounter;

   ITermStructureConsistentModel tsmodel = (ITermStructureConsistentModel)base.model_.link;
   try
   {
      if (tsmodel != null)
      {
         referenceDate = tsmodel.termStructure().link.referenceDate();
         dayCounter = tsmodel.termStructure().link.dayCounter();
      }
      else
      {
         referenceDate = termStructure_.link.referenceDate();
         dayCounter = termStructure_.link.dayCounter();
      }
   }
   catch
   {
      referenceDate = termStructure_.link.referenceDate();
      dayCounter = termStructure_.link.dayCounter();
   }

   List<double> amounts = new InitializedList<double>(arguments_.fixedCoupons.Count);
   for (int i = 0; i < amounts.Count; i++)
      amounts[i] = arguments_.fixedCoupons[i];
   amounts[amounts.Count - 1] = amounts.Last() + arguments_.nominal;

   double maturity = dayCounter.yearFraction(referenceDate, arguments_.exercise.date(0));

   List<double> fixedPayTimes = new InitializedList<double>(arguments_.fixedPayDates.Count);
   for (int i = 0; i < fixedPayTimes.Count; i++)
      fixedPayTimes[i] = dayCounter.yearFraction(referenceDate, arguments_.fixedPayDates[i]);

   rStarFinder finder = new rStarFinder(model_, arguments_.nominal, maturity, fixedPayTimes, amounts);
   Brent s1d = new Brent();
   double minStrike = -10.0;
   double maxStrike = 10.0;
   s1d.setMaxEvaluations(10000);
   s1d.setLowerBound(minStrike);
   s1d.setUpperBound(maxStrike);
   double rStar = s1d.solve(finder, 1e-8, 0.05, minStrike, maxStrike);

   Option.Type w = arguments_.type == VanillaSwap.Type.Payer ? Option.Type.Put : Option.Type.Call;
   int size = arguments_.fixedCoupons.Count;

   double value = 0.0;
   for (int i = 0; i < size; i++)
   {
      double fixedPayTime = dayCounter.yearFraction(referenceDate, arguments_.fixedPayDates[i]);
      double strike = model_.link.discountBond(maturity, fixedPayTime, rStar);
      double dboValue = model_.link.discountBondOption(w, strike, maturity, fixedPayTime);
      value += amounts[i] * dboValue;
   }
   results_.value = value;
}
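// Sketch of the Jamshidian trick used above (ToyRStarFinder is hypothetical): in a
// one-factor model the coupon-bond price is monotone in the short rate, so there is
// a unique r* at which the bond equals the strike, and the swaption decomposes into
// options on the individual discount bonds struck at their r* values. Illustrated
// here with the toy bond model P(r, t) = exp(-r t):
public class ToyRStarFinder : ISolver1d
{
   private readonly double[] amounts_, times_;
   private readonly double strike_;
   public ToyRStarFinder(double[] amounts, double[] times, double strike)
   { amounts_ = amounts; times_ = times; strike_ = strike; }
   public override double value(double r)
   {
      // strike minus coupon-bond value at short rate r
      double bond = 0.0;
      for (int i = 0; i < amounts_.Length; i++)
         bond += amounts_[i] * Math.Exp(-r * times_[i]);
      return strike_ - bond;
   }
}
// double[] amounts = { 4.0, 4.0, 104.0 };   // 4% coupons plus redemption
// double[] times = { 1.0, 2.0, 3.0 };
// Brent s1d = new Brent();
// s1d.setMaxEvaluations(10000);
// double rStar = s1d.solve(new ToyRStarFinder(amounts, times, 100.0), 1e-8, 0.05, -0.5, 0.5);
// // the strike of the i-th discount bond option is then Math.Exp(-rStar * times[i])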