public double volatility(double strike, VolatilityType volatilityType, double shift = 0.0)
{
   if (volatilityType == volatilityType_ && Utils.close(shift, this.shift()))
      return volatility(strike);

   double? atm = atmLevel();
   Utils.QL_REQUIRE(atm != null, () =>
                    "smile section must provide atm level to compute converted volatilities");
   Option.Type type = strike >= atm ? Option.Type.Call : Option.Type.Put;
   double premium = optionPrice(strike, type);
   double premiumAtm = optionPrice(atm.Value, type);
   if (volatilityType == VolatilityType.ShiftedLognormal)
   {
      try
      {
         return Utils.blackFormulaImpliedStdDev(type, strike, atm.Value, premium, 1.0, shift) /
                Math.Sqrt(exerciseTime());
      }
      catch (Exception)
      {
         return Utils.blackFormulaImpliedStdDevChambers(type, strike, atm.Value, premium,
                                                        premiumAtm, 1.0, shift) /
                Math.Sqrt(exerciseTime());
      }
   }
   else
   {
      return Utils.bachelierBlackFormulaImpliedVol(type, strike, atm.Value, exerciseTime(), premium);
   }
}
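// Usage sketch (hedged): converting between quote conventions on a smile
// section. `smileSection` stands for any concrete SmileSection with a known
// ATM level; the object name and the numbers are illustrative only. Both
// calls below describe the same option premium, once as a Bachelier (normal)
// vol and once as a lognormal vol shifted by 1%.
double strike = 0.02;
double normalVol = smileSection.volatility(strike, VolatilityType.Normal);
double shiftedLnVol = smileSection.volatility(strike, VolatilityType.ShiftedLognormal, 0.01);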
private void initialize()
{
   Utils.QL_REQUIRE(dates_.Count >= interpolator_.requiredPoints, () => "not enough input dates given");
   Utils.QL_REQUIRE(this.data_.Count == this.dates_.Count, () => "dates/data count mismatch");
   Utils.QL_REQUIRE(this.data_[0] == 1.0, () => "the first discount must be == 1.0 " +
                    "to flag the corresponding date as settlement date");

   times_ = new InitializedList<double>(dates_.Count);
   times_[0] = 0.0;
   for (int i = 1; i < dates_.Count; i++)
   {
      Utils.QL_REQUIRE(dates_[i] > dates_[i - 1], () =>
                       "invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      times_[i] = dayCounter().yearFraction(dates_[0], dates_[i]);
      Utils.QL_REQUIRE(!Utils.close(this.times_[i], this.times_[i - 1]), () =>
                       "two dates correspond to the same time " +
                       "under this curve's day count convention");
      Utils.QL_REQUIRE(this.data_[i] > 0.0, () => "negative discount");
#if !QL_NEGATIVE_RATES
      Utils.QL_REQUIRE(this.data_[i] <= this.data_[i - 1], () =>
                       "negative forward rate implied by the discount " + this.data_[i] +
                       " at " + dates_[i] + " (t=" + this.times_[i] + ") after the discount " +
                       this.data_[i - 1] + " at " + dates_[i - 1] + " (t=" + this.times_[i - 1] + ")");
#endif
   }
   setupInterpolation();
   interpolation_.update();
}
/// <summary>
/// Time grid interface
/// <remarks>
/// returns the index i such that grid[i] = t
/// </remarks>
/// </summary>
/// <param name="t"></param>
/// <returns></returns>
public int index(double t)
{
   int i = closestIndex(t);
   if (Utils.close(t, times_[i]))
      return i;

   Utils.QL_REQUIRE(t >= times_.First(), () =>
                    "using inadequate time grid: all nodes are later than the required time t = " +
                    t + " (earliest node is t1 = " + times_.First() + ")");
   Utils.QL_REQUIRE(t <= times_.Last(), () =>
                    "using inadequate time grid: all nodes are earlier than the required time t = " +
                    t + " (latest node is tn = " + times_.Last() + ")");
   int j, k;
   if (t > times_[i])
   {
      j = i;
      k = i + 1;
   }
   else
   {
      j = i - 1;
      k = i;
   }
   Utils.QL_FAIL("using inadequate time grid: the nodes closest to the required time t = " +
                 t + " are t1 = " + times_[j] + " and t2 = " + times_[k]);
   return 0;   // never reached; keeps the compiler satisfied after QL_FAIL
}
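// Behavior sketch, assuming the usual QLNet TimeGrid constructors (mandatory
// times plus a step count): index(t) only succeeds when t coincides, up to
// Utils.close, with an existing node; off-grid times fail loudly instead of
// silently snapping to the nearest node.
TimeGrid grid = new TimeGrid(new List<double> { 0.5, 1.0, 2.0 }, 4);
int i = grid.index(1.0);   // fine: 1.0 is a grid node
// grid.index(0.7);        // would throw: 0.7 lies strictly between nodes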
public void addFixings(Dictionary<Date, double> source, bool forceOverwrite)
{
   ObservableValue<TimeSeries<double>> target = IndexManager.instance().getHistory(name());
   foreach (Date d in source.Keys)
   {
      if (isValidFixingDate(d))
      {
         if (!target.value().ContainsKey(d))
            target.value().Add(d, source[d]);
         else if (forceOverwrite)
            target.value()[d] = source[d];
         else if (Utils.close(target.value()[d], source[d]))
            continue;
         else
            throw new ArgumentException("Duplicated fixing provided: " + d + ", " + source[d] +
                                        " while " + target.value()[d] + " value is already present");
      }
      else
      {
         throw new ArgumentException("Invalid fixing provided: " + d.DayOfWeek + " " + d + ", " + source[d]);
      }
   }
   IndexManager.instance().setHistory(name(), target);
}
private void initialize()
{
   Utils.QL_REQUIRE(dates_.Count >= interpolator_.requiredPoints, () => "not enough input dates given");
   Utils.QL_REQUIRE(this.data_.Count == this.dates_.Count, () => "dates/data count mismatch");

   times_ = new InitializedList<double>(dates_.Count);
   times_[0] = 0.0;
   for (int i = 1; i < dates_.Count; i++)
   {
      Utils.QL_REQUIRE(dates_[i] > dates_[i - 1], () =>
                       "invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      times_[i] = dayCounter().yearFraction(dates_[0], dates_[i]);
      Utils.QL_REQUIRE(!Utils.close(times_[i], times_[i - 1]), () =>
                       "two dates correspond to the same time " +
                       "under this curve's day count convention");
#if !QL_NEGATIVE_RATES
      Utils.QL_REQUIRE(this.data_[i] >= 0.0, () => "negative forward");
#endif
   }
   setupInterpolation();
   interpolation_.update();
}
// PRIMITIVE
/*! indefinite integral of the instantaneous covariance function at
 *  time t between T-fixing and S-fixing rates
 *  \f[ \int f(T-t)f(S-t)dt \f]
 */
public double primitive(double t, double T, double S)
{
   if (T < t || S < t)
      return 0.0;

   if (Utils.close(c_, 0.0))
   {
      double v = a_ + d_;
      return t * (v * v + v * b_ * S + v * b_ * T - v * b_ * t +
                  b_ * b_ * S * T - 0.5 * b_ * b_ * t * (S + T) +
                  b_ * b_ * t * t / 3.0);
   }

   double k1 = Math.Exp(c_ * t), k2 = Math.Exp(c_ * S), k3 = Math.Exp(c_ * T);

   return (b_ * b_ * (-1 - 2 * c_ * c_ * S * T - c_ * (S + T) +
                      k1 * k1 * (1 + c_ * (S + T - 2 * t) + 2 * c_ * c_ * (S - t) * (T - t))) +
           2 * c_ * c_ * (2 * d_ * a_ * (k2 + k3) * (k1 - 1) +
                          a_ * a_ * (k1 * k1 - 1) + 2 * c_ * d_ * d_ * k2 * k3 * t) +
           2 * b_ * c_ * (a_ * (-1 - c_ * (S + T) + k1 * k1 * (1 + c_ * (S + T - 2 * t))) -
                          2 * d_ * (k3 * (1 + c_ * S) + k2 * (1 + c_ * T) -
                                    k1 * k3 * (1 + c_ * (S - t)) -
                                    k1 * k2 * (1 + c_ * (T - t))))
          ) / (4 * c_ * c_ * c_ * k2 * k3);
}
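// Quick self-check sketch for the closed form above, assuming the abcd
// instantaneous volatility f(tau) = (a + b*tau)*exp(-c*tau) + d and a
// hypothetical `model` exposing primitive(): the difference
// primitive(t2, T, S) - primitive(t1, T, S) should match a crude midpoint
// integral of f(T-t)*f(S-t) over [t1, t2].
double a = 0.04, b = 0.1, c = 0.5, d = 0.01, T = 2.0, S = 3.0;
Func<double, double> f = tau => (a + b * tau) * Math.Exp(-c * tau) + d;
double t1 = 0.0, t2 = 1.0;
int n = 100000;
double h = (t2 - t1) / n, sum = 0.0;
for (int k = 0; k < n; ++k)
{
   double t = t1 + (k + 0.5) * h;   // midpoint rule
   sum += f(T - t) * f(S - t) * h;
}
// sum should agree with model.primitive(t2, T, S) - model.primitive(t1, T, S)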
public override void partialRollback(DiscretizedAsset asset, double to)
{
   double from = asset.time();

   if (Utils.close(from, to))
      return;

   Utils.QL_REQUIRE(from > to, () =>
                    "cannot roll the asset back to " + to +
                    " (it is already at t = " + from + ")");

   int iFrom = t_.index(from);
   int iTo = t_.index(to);

   for (int i = iFrom - 1; i >= iTo; --i)
   {
      Vector newValues = new Vector(impl().size(i));
      impl().stepback(i, asset.values(), newValues);
      asset.setTime(t_[i]);
      asset.setValues(newValues);
      // skip the very last adjustment
      if (i != iTo)
         asset.adjustValues();
   }
}
// Stores historical fixings from a TimeSeries.
// The dates in the TimeSeries must be the actual calendar dates of the
// fixings; no settlement days must be used.
public void addFixings(TimeSeries<double?> source, bool forceOverwrite = false)
{
   checkNativeFixingsAllowed();
   TimeSeries<double?> target = IndexManager.instance().getHistory(name());
   foreach (Date d in source.Keys)
   {
      if (isValidFixingDate(d))
      {
         if (!target.ContainsKey(d))
            target.Add(d, source[d]);
         else if (forceOverwrite)
            target[d] = source[d];
         else if (Utils.close(target[d].GetValueOrDefault(), source[d].GetValueOrDefault()))
            continue;
         else
            throw new ArgumentException("Duplicated fixing provided: " + d + ", " + source[d] +
                                        " while " + target[d] + " value is already present");
      }
      else
      {
         throw new ArgumentException("Invalid fixing provided: " + d.DayOfWeek + " " + d + ", " + source[d]);
      }
   }
   IndexManager.instance().setHistory(name(), target);
}
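// Usage sketch, assuming a Euribor6M index and business-day fixing dates:
// loading two historical fixings in one call. Re-adding the same values is a
// no-op (the Utils.close branch above); a conflicting value throws unless
// forceOverwrite is set.
IborIndex index = new Euribor6M();
TimeSeries<double?> fixings = new TimeSeries<double?>();
fixings.Add(new Date(2, Month.January, 2015), 0.0017);
fixings.Add(new Date(5, Month.January, 2015), 0.0016);
index.addFixings(fixings);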
public InterpolatedZeroInflationCurve(Date referenceDate, Calendar calendar, DayCounter dayCounter,
                                      Period lag, Frequency frequency, bool indexIsInterpolated,
                                      Handle<YieldTermStructure> yTS, List<Date> dates, List<double> rates,
                                      Interpolator interpolator = default(Interpolator))
   : base(referenceDate, calendar, dayCounter, rates[0], lag, frequency, indexIsInterpolated, yTS)
{
   times_ = new List<double>();
   dates_ = dates;
   data_ = rates;
   interpolator_ = interpolator ?? new Interpolator();

   Utils.QL_REQUIRE(dates_.Count > 1, () => "too few dates: " + dates_.Count);

   // check that the data starts from the beginning,
   // i.e. referenceDate - lag, at least, must be in the relevant period
   KeyValuePair<Date, Date> lim =
      Utils.inflationPeriod(yTS.link.referenceDate() - this.observationLag(), frequency);
   Utils.QL_REQUIRE(lim.Key <= dates_[0] && dates_[0] <= lim.Value, () =>
                    "first data date is not in base period, date: " + dates_[0] +
                    " not within [" + lim.Key + "," + lim.Value + "]");

   // by convention, if the index is not interpolated we pull all the dates
   // back to the start of their inflation periods;
   // otherwise the time calculations will be inconsistent
   if (!indexIsInterpolated_)
   {
      for (int i = 0; i < dates_.Count; i++)
         dates_[i] = Utils.inflationPeriod(dates_[i], frequency).Key;
   }

   Utils.QL_REQUIRE(this.data_.Count == dates_.Count, () =>
                    "indices/dates count mismatch: " + this.data_.Count + " vs " + dates_.Count);

   this.times_ = new InitializedList<double>(dates_.Count);
   this.times_[0] = timeFromReference(dates_[0]);
   for (int i = 1; i < dates_.Count; i++)
   {
      Utils.QL_REQUIRE(dates_[i] > dates_[i - 1], () => "dates not sorted");
      // zero inflation rates can be negative, but must be greater than -1
      Utils.QL_REQUIRE(this.data_[i] > -1.0, () => "zero inflation data < -100 %");
      this.times_[i] = timeFromReference(dates_[i]);
      Utils.QL_REQUIRE(!Utils.close(this.times_[i], this.times_[i - 1]), () =>
                       "two dates correspond to the same time " +
                       "under this curve's day count convention");
   }

   this.interpolation_ = this.interpolator_.interpolate(times_, times_.Count, data_);
   this.interpolation_.update();
}
/*! This method will be invoked after rollback and after any
 *  other asset had its chance to look at the values. For
 *  instance, payments happening at the present time (and therefore
 *  not included in an option to be exercised at this time) will be
 *  added here.
 *
 *  This method is not virtual; derived classes must override
 *  the protected postAdjustValuesImpl() method instead.
 */
public void postAdjustValues()
{
   if (!Utils.close(time(), latestPostAdjustment_))
   {
      postAdjustValuesImpl();
      latestPostAdjustment_ = time();
   }
}
private void initialize(Compounding compounding, Frequency frequency, Date refDate = null)
{
   Utils.QL_REQUIRE(dates_.Count >= interpolator_.requiredPoints, () => "not enough input dates given");
   Utils.QL_REQUIRE(data_.Count == dates_.Count, () => "dates/yields count mismatch");

   times_ = new List<double>(dates_.Count);
   double offset = 0.0;
   if (refDate != null)
      offset = dayCounter().yearFraction(refDate, dates_[0]);
   times_.Add(offset);

   if (compounding != Compounding.Continuous)
   {
      // We also have to convert the first rate.
      // The first time is 0.0, so we can't use it.
      // We fall back to about one day.
      double dt = 1.0 / 365;
      InterestRate r = new InterestRate(data_[0], dayCounter(), compounding, frequency);
      data_[0] = r.equivalentRate(Compounding.Continuous, Frequency.NoFrequency, dt).value();
#if !QL_NEGATIVE_RATES
      Utils.QL_REQUIRE(data_[0] > 0.0, () => "non-positive yield");
#endif
   }

   for (int i = 1; i < dates_.Count; i++)
   {
      Utils.QL_REQUIRE(dates_[i] > dates_[i - 1], () =>
                       "invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      times_.Add(dayCounter().yearFraction(refDate ?? dates_[0], dates_[i]));
      Utils.QL_REQUIRE(!Utils.close(times_[i], times_[i - 1]), () =>
                       "two dates correspond to the same time " +
                       "under this curve's day count convention");

      // adjusting zero rates to match continuous compounding
      if (compounding != Compounding.Continuous)
      {
         InterestRate r = new InterestRate(data_[i], dayCounter(), compounding, frequency);
         data_[i] = r.equivalentRate(Compounding.Continuous, Frequency.NoFrequency, times_[i]).value();
      }

#if !QL_NEGATIVE_RATES
      Utils.QL_REQUIRE(data_[i] > 0.0, () => "non-positive yield");
      // positive yields are not enough to ensure non-negative fwd rates,
      // so here's a stronger requirement
      Utils.QL_REQUIRE(data_[i] * times_[i] - data_[i - 1] * times_[i - 1] >= 0.0, () =>
                       "negative forward rate implied by the zero yield " + data_[i] +
                       " at " + dates_[i] + " (t=" + times_[i] + ") after the zero yield " +
                       data_[i - 1] + " at " + dates_[i - 1] + " (t=" + times_[i - 1] + ")");
#endif
   }

   setupInterpolation();
   interpolation_.update();
}
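// Worked sketch of the conversion above for the annually-compounded case:
// the defining relation is (1 + r)^t = exp(R * t), so the continuously
// compounded equivalent is R = ln(1 + r), independent of t. For example, a
// 5% annual zero maps to about 4.879% continuous.
double annual = 0.05;
double continuous = Math.Log(1.0 + annual);   // about 0.04879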
/*! This method returns the zero of the function \f$ f \f$, determined with the given accuracy
 *  \f$ \epsilon \f$; depending on the particular solver, this might mean that the returned
 *  \f$ x \f$ is such that \f$ |f(x)| < \epsilon \f$, or that \f$ |x-\xi| < \epsilon \f$
 *  where \f$ \xi \f$ is the real zero.
 *
 *  An initial guess must be supplied, as well as two values \f$ x_\mathrm{min} \f$ and
 *  \f$ x_\mathrm{max} \f$ which must bracket the zero (i.e., either
 *  \f$ f(x_\mathrm{min}) \leq 0 \leq f(x_\mathrm{max}) \f$, or
 *  \f$ f(x_\mathrm{max}) \leq 0 \leq f(x_\mathrm{min}) \f$ must be true).
 */
public double solve(ISolver1d f, double accuracy, double guess, double xMin, double xMax)
{
   if (accuracy <= 0.0)
      throw new ArgumentException("accuracy (" + accuracy + ") must be positive");

   // check whether we really want to use epsilon
   accuracy = Math.Max(accuracy, Const.QL_EPSILON);

   xMin_ = xMin;
   xMax_ = xMax;

   if (!(xMin_ < xMax_))
      throw new ArgumentException("invalid range: xMin_ (" + xMin_ + ") >= xMax_ (" + xMax_ + ")");
   if (!(!lowerBoundEnforced_ || xMin_ >= lowerBound_))
      throw new ArgumentException("xMin_ (" + xMin_ + ") < enforced lower bound (" + lowerBound_ + ")");
   if (!(!upperBoundEnforced_ || xMax_ <= upperBound_))
      throw new ArgumentException("xMax_ (" + xMax_ + ") > enforced upper bound (" + upperBound_ + ")");

   fxMin_ = f.value(xMin_);
   if (Utils.close(fxMin_, 0.0))
      return xMin_;

   fxMax_ = f.value(xMax_);
   if (Utils.close(fxMax_, 0.0))
      return xMax_;

   evaluationNumber_ = 2;

   if (!(fxMin_ * fxMax_ < 0.0))
      throw new ArgumentException("root not bracketed: f[" + xMin_ + "," + xMax_ + "] -> [" +
                                  fxMin_ + "," + fxMax_ + "]");
   if (!(guess > xMin_))
      throw new ArgumentException("guess (" + guess + ") < xMin_ (" + xMin_ + ")");
   if (!(guess < xMax_))
      throw new ArgumentException("guess (" + guess + ") > xMax_ (" + xMax_ + ")");

   root_ = guess;
   return solveImpl(f, accuracy);
}
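// Usage sketch: any concrete Solver1D drives its solveImpl through this entry
// point; Brent is used here as an example, and SquareMinusTwo is a
// hypothetical helper, not part of the library. The root of x^2 - 2 on [1, 2]
// is sqrt(2).
class SquareMinusTwo : ISolver1d
{
   public override double value(double x) { return x * x - 2.0; }
}

Brent solver = new Brent();
double root = solver.solve(new SquareMinusTwo(), 1e-12, 1.5, 1.0, 2.0);   // about 1.41421356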
protected override double solveImpl(ISolver1d f, double xAccuracy)
{
   /* The implementation of the algorithm was inspired by
    * Press, Teukolsky, Vetterling, and Flannery,
    * "Numerical Recipes in C", 2nd edition,
    * Cambridge University Press
    */
   double fl, fh, xl, xh, dx, del, froot;

   // Identify the limits so that xl corresponds to the low side
   if (fxMin_ < 0.0)
   {
      xl = xMin_;
      fl = fxMin_;
      xh = xMax_;
      fh = fxMax_;
   }
   else
   {
      xl = xMax_;
      fl = fxMax_;
      xh = xMin_;
      fh = fxMin_;
   }

   dx = xh - xl;

   while (evaluationNumber_ <= maxEvaluations_)
   {
      // Increment with respect to latest value
      root_ = xl + dx * fl / (fl - fh);
      froot = f.value(root_);
      evaluationNumber_++;

      if (froot < 0.0)   // Replace appropriate limit
      {
         del = xl - root_;
         xl = root_;
         fl = froot;
      }
      else
      {
         del = xh - root_;
         xh = root_;
         fh = froot;
      }
      dx = xh - xl;

      // Convergence criterion
      if (Math.Abs(del) < xAccuracy || Utils.close(froot, 0.0))
         return root_;
   }

   Utils.QL_FAIL("maximum number of function evaluations (" + maxEvaluations_ + ") exceeded",
                 QLNetExceptionEnum.MaxNumberFuncEvalExceeded);
   return 0;   // never reached
}
protected void calculateNotionalsFromCashflows()
{
   notionalSchedule_.Clear();
   notionals_.Clear();

   Date lastPaymentDate = new Date();
   for (int i = 0; i < cashflows_.Count; ++i)
   {
      Coupon coupon = cashflows_[i] as Coupon;
      if (coupon == null)
         continue;

      if (i == 0)
         notionalSchedule_.Add(coupon.accrualStartDate());

      double notional = coupon.nominal();
      // we add the notional only if it is the first one...
      if (notionals_.empty())
      {
         notionals_.Add(coupon.nominal());
         lastPaymentDate = coupon.date();
      }
      else if (!Utils.close(notional, notionals_.Last()))
      {
         // ...or if it has changed.
         if (!(notional < notionals_.Last()))
            throw new ApplicationException("increasing coupon notionals");
         notionals_.Add(coupon.nominal());
         // in this case, we also add the last valid date for the previous one...
         notionalSchedule_.Add(lastPaymentDate);
         // ...and store the candidate for this one.
         lastPaymentDate = coupon.date();
      }
      else
      {
         // otherwise, we just extend the valid range of dates
         // for the current notional.
         lastPaymentDate = coupon.date();
      }
   }
   if (notionals_.empty())
      throw new ApplicationException("no coupons provided");

   notionals_.Add(0.0);
   notionalSchedule_.Add(lastPaymentDate);
}
//public InterpolatedDiscountCurve(List<Date> dates, List<double> discounts, DayCounter dayCounter,
//                                 Calendar cal = Calendar(), Interpolator interpolator = Interpolator())
public InterpolatedDiscountCurve(List<Date> dates, List<double> discounts, DayCounter dayCounter,
                                 Calendar cal, List<Handle<Quote>> jumps = null,
                                 List<Date> jumpDates = null,
                                 Interpolator interpolator = default(Interpolator))
   : base(dates.First(), cal, dayCounter, jumps, jumpDates)
{
   times_ = new List<double>();
   data_ = discounts;
   interpolator_ = interpolator;
   dates_ = dates;

   if (dates_.empty())
      throw new ApplicationException("no input dates given");
   if (data_.empty())
      throw new ApplicationException("no input discount factors given");
   if (data_.Count != dates_.Count)
      throw new ApplicationException("dates/discount factors count mismatch");
   if (data_[0] != 1.0)
      throw new ApplicationException("the first discount must be == 1.0 " +
                                     "to flag the corresponding date as settlement date");

   times_ = new InitializedList<double>(dates_.Count);
   times_[0] = 0.0;
   for (int i = 1; i < dates_.Count; i++)
   {
      if (!(dates_[i] > dates_[i - 1]))
         throw new ApplicationException("invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      if (!(data_[i] > 0.0))
         throw new ApplicationException("negative discount");
      times_[i] = dayCounter.yearFraction(dates_[0], dates_[i]);
      if (Utils.close(times_[i], times_[i - 1]))
         throw new ApplicationException("two dates correspond to the same time " +
                                        "under this curve's day count convention");
   }
   setupInterpolation();
   interpolation_.update();
}
public bool isInRange(double x, double y)
{
   double x1 = xMin(), x2 = xMax();
   bool xIsInRange = (x >= x1 && x <= x2) || Utils.close(x, x1) || Utils.close(x, x2);
   if (!xIsInRange)
      return false;

   double y1 = yMin(), y2 = yMax();
   return (y >= y1 && y <= y2) || Utils.close(y, y1) || Utils.close(y, y2);
}
public static double unsafeSabrVolatility(double strike, double forward, double expiryTime,
                                          double alpha, double beta, double nu, double rho)
{
   double oneMinusBeta = 1.0 - beta;
   double A = Math.Pow(forward * strike, oneMinusBeta);
   double sqrtA = Math.Sqrt(A);
   double logM;

   if (!Utils.close(forward, strike))
   {
      logM = Math.Log(forward / strike);
   }
   else
   {
      double epsilon = (forward - strike) / strike;
      logM = epsilon - .5 * epsilon * epsilon;
   }

   double z = (nu / alpha) * sqrtA * logM;
   double B = 1.0 - 2.0 * rho * z + z * z;
   double C = oneMinusBeta * oneMinusBeta * logM * logM;
   double tmp = (Math.Sqrt(B) + z - rho) / (1.0 - rho);
   double xx = Math.Log(tmp);
   double D = sqrtA * (1.0 + C / 24.0 + C * C / 1920.0);
   double d = 1.0 + expiryTime * (oneMinusBeta * oneMinusBeta * alpha * alpha / (24.0 * A) +
                                  0.25 * rho * beta * nu * alpha / sqrtA +
                                  (2.0 - 3.0 * rho * rho) * (nu * nu / 24.0));

   double multiplier;
   // computations are precise enough as long as the square of z is worth
   // slightly more than machine precision (hence the factor m)
   const double m = 10;
   if (Math.Abs(z * z) > Const.QL_EPSILON * m)
   {
      multiplier = z / xx;
   }
   else
   {
      alpha = (0.5 - rho * rho) / (1.0 - rho);
      beta = alpha - .5;
      double gamma = rho / (1 - rho);
      multiplier = 1.0 - beta * z + (gamma - alpha + beta * beta * .5) * z * z;
   }
   return (alpha / D) * multiplier * d;
}
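// Usage sketch with illustrative parameters; "unsafe" means no validation is
// performed here, so in practice the inputs are checked beforehand (QuantLib
// pairs this with a separate parameter-validation routine). An at-the-money
// call on a 3% forward:
double sabrVol = unsafeSabrVolatility(0.03, 0.03, 1.0,      // strike, forward, expiry
                                      0.04, 0.5, 0.4, -0.3); // alpha, beta, nu, rho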
private void initialize()
{
   Utils.QL_REQUIRE(dates_.Count >= interpolator_.requiredPoints, () => "not enough input dates given");
   Utils.QL_REQUIRE(this.data_.Count == dates_.Count, () => "dates/data count mismatch");

   this.times_.Add(0.0);
   for (int i = 1; i < dates_.Count; ++i)
   {
      Utils.QL_REQUIRE(dates_[i] > dates_[i - 1], () =>
                       "invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      this.times_.Add(dayCounter().yearFraction(dates_[0], dates_[i]));
      Utils.QL_REQUIRE(!Utils.close(this.times_[i], this.times_[i - 1]), () =>
                       "two dates correspond to the same time " +
                       "under this curve's day count convention");
      Utils.QL_REQUIRE(this.data_[i] >= 0.0, () => "negative hazard rate");
   }
   setupInterpolation();
   this.interpolation_.update();
}
protected override double solveImpl(ISolver1d f, double xAccuracy)
{
   /* The implementation of the algorithm was inspired by
    * Press, Teukolsky, Vetterling, and Flannery,
    * "Numerical Recipes in C", 2nd edition, Cambridge
    * University Press
    */
   double fl, froot, dx, xl;

   // Pick the bound with the smaller function value
   // as the most recent guess
   if (Math.Abs(fxMin_) < Math.Abs(fxMax_))
   {
      root_ = xMin_;
      froot = fxMin_;
      xl = xMax_;
      fl = fxMax_;
   }
   else
   {
      root_ = xMax_;
      froot = fxMax_;
      xl = xMin_;
      fl = fxMin_;
   }

   while (evaluationNumber_ <= maxEvaluations_)
   {
      dx = (xl - root_) * froot / (froot - fl);
      xl = root_;
      fl = froot;
      root_ += dx;
      froot = f.value(root_);
      ++evaluationNumber_;

      if (Math.Abs(dx) < xAccuracy || Utils.close(froot, 0.0))
         return root_;
   }

   Utils.QL_FAIL("maximum number of function evaluations (" + maxEvaluations_ + ") exceeded",
                 QLNetExceptionEnum.MaxNumberFuncEvalExceeded);
   return 0;   // never reached
}
public CallableFixedRateBond(int settlementDays, double faceAmount, Schedule schedule,
                             List<double> coupons, DayCounter accrualDayCounter,
                             BusinessDayConvention paymentConvention = BusinessDayConvention.Following,
                             double redemption = 100.0, Date issueDate = null,
                             CallabilitySchedule putCallSchedule = null)
   : base(settlementDays, schedule, accrualDayCounter, issueDate, putCallSchedule)
{
   if (putCallSchedule == null)
      putCallSchedule = new CallabilitySchedule();

   frequency_ = schedule.tenor().frequency();

   bool isZeroCouponBond = (coupons.Count == 1 && Utils.close(coupons[0], 0.0));

   if (!isZeroCouponBond)
   {
      cashflows_ = new FixedRateLeg(schedule)
                   .withCouponRates(coupons, accrualDayCounter)
                   .withNotionals(faceAmount)
                   .withPaymentAdjustment(paymentConvention);

      addRedemptionsToCashflows(new List<double>() { redemption });
   }
   else
   {
      Date redemptionDate = calendar_.adjust(maturityDate_, paymentConvention);
      setSingleRedemption(faceAmount, redemption, redemptionDate);
   }

   // used for impliedVolatility() calculation
   SimpleQuote dummyVolQuote = new SimpleQuote(0.0);
   blackVolQuote_.linkTo(dummyVolQuote);
   blackEngine_ = new BlackCallableFixedRateBondEngine(blackVolQuote_, blackDiscountCurve_);
}
public InterpolatedForwardCurve(List<Date> dates, List<double> yields, DayCounter dayCounter,
                                List<Handle<Quote>> jumps = null, List<Date> jumpDates = null,
                                Interpolator interpolator = default(Interpolator))
   : base(dates.First(), new Calendar(), dayCounter, jumps, jumpDates)
{
   times_ = new List<double>();
   dates_ = dates;
   data_ = yields;
   interpolator_ = interpolator;

   if (!(dates_.Count > 1))
      throw new ApplicationException("too few dates");
   if (data_.Count != dates_.Count)
      throw new ApplicationException("dates/yields count mismatch");

   times_ = new List<double>(dates_.Count);
   times_.Add(0.0);
   for (int i = 1; i < dates_.Count; i++)
   {
      if (!(dates_[i] > dates_[i - 1]))
         throw new ApplicationException("invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      //#if !defined(QL_NEGATIVE_RATES)
      //QL_REQUIRE(this->data_[i] >= 0.0, "negative forward");
      //#endif
      // note: a List created with a capacity argument is still empty,
      // so the times must be appended rather than assigned by index
      times_.Add(dayCounter.yearFraction(dates_[0], dates_[i]));
      if (Utils.close(times_[i], times_[i - 1]))
         throw new ApplicationException("two dates correspond to the same time " +
                                        "under this curve's day count convention");
   }
   setupInterpolation();
   interpolation_.update();
}
protected override double solveImpl(ISolver1d f, double xAccuracy)
{
   /* The implementation of the algorithm was inspired by
    * Press, Teukolsky, Vetterling, and Flannery,
    * "Numerical Recipes in C", 2nd edition, Cambridge
    * University Press
    */
   double dx, xMid, fMid;

   // Orient the search so that f > 0 lies at root_ + dx
   if (fxMin_ < 0.0)
   {
      dx = xMax_ - xMin_;
      root_ = xMin_;
   }
   else
   {
      dx = xMin_ - xMax_;
      root_ = xMax_;
   }

   while (evaluationNumber_ <= maxEvaluations_)
   {
      dx /= 2.0;
      xMid = root_ + dx;
      fMid = f.value(xMid);
      evaluationNumber_++;
      if (fMid <= 0.0)
         root_ = xMid;
      if (Math.Abs(dx) < xAccuracy || Utils.close(fMid, 0.0))
         return root_;
   }

   Utils.QL_FAIL("maximum number of function evaluations (" + maxEvaluations_ + ") exceeded",
                 QLNetExceptionEnum.MaxNumberFuncEvalExceeded);
   return 0;   // never reached
}
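// Back-of-envelope sketch for sizing maxEvaluations_ with this bisection:
// the bracket halves on every evaluation, so reaching accuracy eps on
// [xMin, xMax] needs roughly log2((xMax - xMin) / eps) steps.
double width = 1.0, eps = 1e-9;
int stepsNeeded = (int)Math.Ceiling(Math.Log(width / eps, 2.0));   // 30 for this bracket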
//! \name Time grid interface
//! returns the index i such that grid[i] = t
public int index(double t)
{
   int i = closestIndex(t);
   if (Utils.close(t, times_[i]))
   {
      return i;
   }
   else
   {
      if (t < times_.First())
      {
         throw new ApplicationException(
            "using inadequate time grid: all nodes are later than the required time t = " +
            t + " (earliest node is t1 = " + times_.First() + ")");
      }
      else if (t > times_.Last())
      {
         throw new ApplicationException(
            "using inadequate time grid: all nodes are earlier than the required time t = " +
            t + " (latest node is tn = " + times_.Last() + ")");
      }
      else
      {
         int j, k;
         if (t > times_[i])
         {
            j = i;
            k = i + 1;
         }
         else
         {
            j = i - 1;
            k = i;
         }
         throw new ApplicationException(
            "using inadequate time grid: the nodes closest to the required time t = " +
            t + " are t1 = " + times_[j] + " and t2 = " + times_[k]);
      }
   }
}
public InterpolatedSurvivalProbabilityCurve(List<Date> dates, List<double> probabilities,
                                            DayCounter dayCounter, Calendar calendar = null,
                                            List<Handle<Quote>> jumps = null, List<Date> jumpDates = null,
                                            Interpolator interpolator = default(Interpolator))
   : base(dates[0], calendar, dayCounter, jumps, jumpDates)
{
   dates_ = dates;

   Utils.QL_REQUIRE(dates_.Count >= interpolator.requiredPoints, () => "not enough input dates given");
   Utils.QL_REQUIRE(this.data_.Count == dates_.Count, () => "dates/data count mismatch");
   Utils.QL_REQUIRE(this.data_[0].IsEqual(1.0), () =>
                    "the first probability must be == 1.0 to flag the corresponding date as reference date");

   this.times_ = new InitializedList<double>(dates_.Count);
   this.times_[0] = 0.0;
   for (int i = 1; i < dates_.Count; ++i)
   {
      Utils.QL_REQUIRE(dates_[i] > dates_[i - 1], () =>
                       "invalid date (" + dates_[i] + ", vs " + dates_[i - 1] + ")");
      this.times_[i] = dayCounter.yearFraction(dates_[0], dates_[i]);
      Utils.QL_REQUIRE(!Utils.close(this.times_[i], this.times_[i - 1]), () =>
                       "two dates correspond to the same time under this curve's day count convention");
      Utils.QL_REQUIRE(this.data_[i] > 0.0, () => "negative probability");
      Utils.QL_REQUIRE(this.data_[i] <= this.data_[i - 1], () =>
                       "negative hazard rate implied by the survival probability " + this.data_[i] +
                       " at " + dates_[i] + " (t=" + this.times_[i] +
                       ") after the survival probability " + this.data_[i - 1] +
                       " at " + dates_[i - 1] + " (t=" + this.times_[i - 1] + ")");
   }

   this.interpolation_ = this.interpolator_.interpolate(this.times_, this.times_.Count, this.data_);
   this.interpolation_.update();
}
public Vector adjustedGrid()
{
   double t = time();
   Vector grid = method().grid(t);

   // add back all dividend amounts in the future
   for (var i = 0; i < arguments_.dividends.Count; i++)
   {
      double dividendTime = dividendTimes_[i];
      if (dividendTime >= t || Utils.close(dividendTime, t))
      {
         Dividend d = arguments_.dividends[i];
         double dividendDiscount = process_.riskFreeRate().currentLink().discount(dividendTime) /
                                   process_.riskFreeRate().currentLink().discount(t);
         for (var j = 0; j < grid.size(); j++)
            grid[j] += d.amount(grid[j]) * dividendDiscount;
      }
   }
   return grid;
}
/*! This method checks whether the asset was rolled at the given time. */
protected bool isOnTime(double t)
{
   TimeGrid grid = method().timeGrid();
   return Utils.close(grid[grid.index(t)], time());
}
public Concentrating1dMesher(double start, double end, int size,
                             Pair<double?, double?> cPoints = null, bool requireCPoint = false)
   : base(size)
{
   Utils.QL_REQUIRE(end > start, () => "end must be larger than start");

   if (cPoints == null)
      cPoints = new Pair<double?, double?>();

   double? cPoint = cPoints.first;
   double? density = cPoints.second == null ? null : cPoints.second * (end - start);

   Utils.QL_REQUIRE(cPoint == null || (cPoint >= start && cPoint <= end), () =>
                    "cPoint must be between start and end");
   Utils.QL_REQUIRE(density == null || density > 0.0, () => "density > 0 required");
   Utils.QL_REQUIRE(cPoint == null || density != null, () => "density must be given if cPoint is given");
   Utils.QL_REQUIRE(!requireCPoint || cPoint != null, () => "cPoint is required in grid but not given");

   double dx = 1.0 / (size - 1);

   if (cPoint != null)
   {
      List<double> u = new List<double>();
      List<double> z = new List<double>();
      Interpolation transform = null;
      double c1 = Utils.Asinh((start - cPoint.Value) / density.GetValueOrDefault());
      double c2 = Utils.Asinh((end - cPoint.Value) / density.GetValueOrDefault());

      if (requireCPoint)
      {
         u.Add(0.0);
         z.Add(0.0);
         if (!Utils.close(cPoint.Value, start) && !Utils.close(cPoint.Value, end))
         {
            double z0 = -c1 / (c2 - c1);
            double u0 = Math.Max(Math.Min(Convert.ToInt32(z0 * (size - 1) + 0.5),
                                          Convert.ToInt32(size) - 2),
                                 1) / (Convert.ToDouble(size - 1));
            u.Add(u0);
            z.Add(z0);
         }
         u.Add(1.0);
         z.Add(1.0);
         transform = new LinearInterpolation(u, u.Count, z);
      }

      for (int i = 1; i < size - 1; ++i)
      {
         double li = requireCPoint ? transform.value(i * dx) : i * dx;
         locations_[i] = cPoint.Value + density.GetValueOrDefault() * Math.Sinh(c1 * (1.0 - li) + c2 * li);
      }
   }
   else
   {
      for (int i = 1; i < size - 1; ++i)
         locations_[i] = start + i * dx * (end - start);
   }

   locations_[0] = start;
   locations_[locations_.Count - 1] = end;

   for (int i = 0; i < size - 1; ++i)
      dplus_[i] = dminus_[i + 1] = locations_[i + 1] - locations_[i];

   dplus_[dplus_.Count - 1] = null;
   dminus_[0] = null;
}
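// Usage sketch: a 101-point mesh on [0, 2] concentrated around 1.0 (e.g. a
// strike), with the point itself forced onto the grid; the second pair entry
// is the relative density, where smaller values cluster points more tightly.
var mesher = new Concentrating1dMesher(0.0, 2.0, 101,
                                       new Pair<double?, double?>(1.0, 0.1), true);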
protected override double solveImpl(ISolver1d f, double xAcc)
{
   /* The implementation of the algorithm was inspired by
    * Press, Teukolsky, Vetterling, and Flannery,
    * "Numerical Recipes in C", 2nd edition, Cambridge
    * University Press
    */
   double fxMid, froot, s, xMid, nextRoot;

   // tests on Black-Scholes implied volatility show that the Ridder solver
   // algorithm actually provides an accuracy 100 times below the promised one
   double xAccuracy = xAcc / 100.0;

   // Any highly unlikely value, to simplify logic below
   root_ = double.MinValue;

   while (evaluationNumber_ <= maxEvaluations_)
   {
      xMid = 0.5 * (xMin_ + xMax_);

      // First of two function evaluations per iteration
      fxMid = f.value(xMid);
      ++evaluationNumber_;

      s = Math.Sqrt(fxMid * fxMid - fxMin_ * fxMax_);
      if (Utils.close(s, 0.0))
         return root_;

      // Updating formula
      nextRoot = xMid + (xMid - xMin_) * ((fxMin_ >= fxMax_ ? 1.0 : -1.0) * fxMid / s);
      if (Math.Abs(nextRoot - root_) <= xAccuracy)
         return root_;

      root_ = nextRoot;

      // Second of two function evaluations per iteration
      froot = f.value(root_);
      ++evaluationNumber_;
      if (Utils.close(froot, 0.0))
         return root_;

      // Bookkeeping to keep the root bracketed on next iteration
      if (sign(fxMid, froot).IsNotEqual(fxMid))
      {
         xMin_ = xMid;
         fxMin_ = fxMid;
         xMax_ = root_;
         fxMax_ = froot;
      }
      else if (sign(fxMin_, froot).IsNotEqual(fxMin_))
      {
         xMax_ = root_;
         fxMax_ = froot;
      }
      else if (sign(fxMax_, froot).IsNotEqual(fxMax_))
      {
         xMin_ = root_;
         fxMin_ = froot;
      }
      else
      {
         Utils.QL_FAIL("never get here.");
      }

      if (Math.Abs(xMax_ - xMin_) <= xAccuracy)
         return root_;
   }

   throw new ArgumentException("maximum number of function evaluations (" + maxEvaluations_ + ") exceeded");
}
public override void calculate()
{
   DayCounter rfdc = process_.riskFreeRate().link.dayCounter();
   DayCounter divdc = process_.dividendYield().link.dayCounter();
   DayCounter voldc = process_.blackVolatility().link.dayCounter();
   Calendar volcal = process_.blackVolatility().link.calendar();

   double s0 = process_.stateVariable().link.value();
   Utils.QL_REQUIRE(s0 > 0.0, () => "negative or null underlying given");
   double v = process_.blackVolatility().link.blackVol(arguments_.exercise.lastDate(), s0);
   Date maturityDate = arguments_.exercise.lastDate();
   double r = process_.riskFreeRate().link.zeroRate(maturityDate, rfdc, Compounding.Continuous,
                                                    Frequency.NoFrequency).value();
   double q = process_.dividendYield().link.zeroRate(maturityDate, divdc, Compounding.Continuous,
                                                     Frequency.NoFrequency).value();
   Date referenceDate = process_.riskFreeRate().link.referenceDate();

   // binomial trees with constant coefficient
   Handle<YieldTermStructure> flatRiskFree =
      new Handle<YieldTermStructure>(new FlatForward(referenceDate, r, rfdc));
   Handle<YieldTermStructure> flatDividends =
      new Handle<YieldTermStructure>(new FlatForward(referenceDate, q, divdc));
   Handle<BlackVolTermStructure> flatVol =
      new Handle<BlackVolTermStructure>(new BlackConstantVol(referenceDate, volcal, v, voldc));

   StrikedTypePayoff payoff = arguments_.payoff as StrikedTypePayoff;
   Utils.QL_REQUIRE(payoff != null, () => "non-striked payoff given");

   double maturity = rfdc.yearFraction(referenceDate, maturityDate);

   StochasticProcess1D bs =
      new GeneralizedBlackScholesProcess(process_.stateVariable(), flatDividends, flatRiskFree, flatVol);

   // correct timesteps to ensure a (local) minimum, using the Boyle and Lau
   // approach. See Journal of Derivatives, 1/1994,
   // "Bumping up against the barrier with the binomial method".
   // Note: this approach works only for CoxRossRubinstein lattices, so
   // it is disabled if T is not CoxRossRubinstein or derived from it.
   int optimum_steps = timeSteps_;
   if (maxTimeSteps_ > timeSteps_ && s0 > 0 && arguments_.barrier > 0)   // boost::is_base_of<CoxRossRubinstein, T>::value &&
   {
      double divisor;
      if (s0 > arguments_.barrier)
         divisor = Math.Pow(Math.Log(s0 / arguments_.barrier.Value), 2);
      else
         divisor = Math.Pow(Math.Log(arguments_.barrier.Value / s0), 2);
      if (!Utils.close(divisor, 0))
      {
         for (int i = 1; i < timeSteps_; ++i)
         {
            int optimum = (int)((i * i * v * v * maturity) / divisor);
            if (timeSteps_ < optimum)
            {
               optimum_steps = optimum;
               break;   // found first minimum with iterations >= timesteps
            }
         }
      }

      if (optimum_steps > maxTimeSteps_)
         optimum_steps = maxTimeSteps_;   // too high, limit
   }

   TimeGrid grid = new TimeGrid(maturity, optimum_steps);

   ITree tree = getTree_(bs, maturity, optimum_steps, payoff.strike());

   BlackScholesLattice<ITree> lattice = new BlackScholesLattice<ITree>(tree, r, maturity, optimum_steps);

   DiscretizedAsset option = getAsset_(arguments_, process_, grid);
   option.initialize(lattice, maturity);

   // Partial derivatives calculated from various points in the binomial tree
   // (see J.C. Hull, "Options, Futures and other derivatives", 6th edition, pp. 397-398)

   // Rollback to third-last step, and get underlying prices (s2)
   // and option values (p2) at this point
   option.rollback(grid[2]);
   Vector va2 = new Vector(option.values());
   Utils.QL_REQUIRE(va2.size() == 3, () => "Expect 3 nodes in grid at second step");
   double p2u = va2[2];                   // up
   double p2m = va2[1];                   // mid
   double p2d = va2[0];                   // down (low)
   double s2u = lattice.underlying(2, 2); // up price
   double s2m = lattice.underlying(2, 1); // middle price
   double s2d = lattice.underlying(2, 0); // down (low) price

   // calculate gamma by taking the first derivative of the two deltas
   double delta2u = (p2u - p2m) / (s2u - s2m);
   double delta2d = (p2m - p2d) / (s2m - s2d);
   double gamma = (delta2u - delta2d) / ((s2u - s2d) / 2);

   // Rollback to second-last step, and get option values (p1) at this point
   option.rollback(grid[1]);
   Vector va = new Vector(option.values());
   Utils.QL_REQUIRE(va.size() == 2, () => "Expect 2 nodes in grid at first step");
   double p1u = va[1];
   double p1d = va[0];
   double s1u = lattice.underlying(1, 1); // up (high) price
   double s1d = lattice.underlying(1, 0); // down (low) price

   double delta = (p1u - p1d) / (s1u - s1d);

   // Finally, rollback to t=0
   option.rollback(0.0);
   double p0 = option.presentValue();

   // Store results
   results_.value = p0;
   results_.delta = delta;
   results_.gamma = gamma;

   // theta can be approximated by calculating the numerical derivative
   // between mid value at third-last step and at t0. The underlying price
   // is the same, only time varies.
   results_.theta = (p2m - p0) / grid[2];
}
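// Hedged sketch of the Boyle-Lau search above: with barrier B, spot S0,
// volatility v and maturity T, the candidate step counts are
// n(i) = i^2 * v^2 * T / ln(S0/B)^2, and the engine takes the first one at
// or above the requested timeSteps_. Numbers below are illustrative.
double S0 = 100.0, B = 95.0, vol2 = 0.2, T2 = 1.0;
double div2 = Math.Pow(Math.Log(S0 / B), 2);
for (int i = 1; i <= 3; ++i)
   Console.WriteLine((int)(i * i * vol2 * vol2 * T2 / div2));   // 15, 60, 136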
public bool isInRange(double x)
{
   double x1 = xMin(), x2 = xMax();
   return (x >= x1 && x <= x2) || Utils.close(x, x1) || Utils.close(x, x2);
}
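// For reference, a minimal sketch of the Utils.close() semantics relied on
// throughout these snippets, mirroring QuantLib's close(): equality up to
// n units of machine epsilon scaled by both operands (n = 42 by default),
// rather than exact floating-point comparison.
static bool CloseSketch(double x, double y, int n = 42)
{
   if (x.Equals(y))
      return true;                        // also covers close(0.0, 0.0)
   double diff = Math.Abs(x - y);
   double tolerance = n * Const.QL_EPSILON;
   return diff <= tolerance * Math.Abs(x) && diff <= tolerance * Math.Abs(y);
}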