public void testObservability()
{
   // Testing volatility cube observability
   //
   // Builds each swaption-volatility-cube implementation twice -- once before
   // and once after shifting the global evaluation date by one business day --
   // and requires both instances to return identical volatilities at every
   // (option tenor, swap tenor, strike) node, i.e. the cube must track the
   // evaluation date consistently.
   CommonVars vars = new CommonVars();

   // One 4-element guess vector per (option tenor, swap tenor) pair;
   // presumably the SABR parameters (alpha, beta, nu, rho) for the fit --
   // TODO confirm against the SwaptionVolCube1x constructor.
   List <List <Handle <Quote> > > parametersGuess = new InitializedList <List <Handle <Quote> > >(vars.cube.tenors.options.Count * vars.cube.tenors.swaps.Count);
   for (int i = 0; i < vars.cube.tenors.options.Count * vars.cube.tenors.swaps.Count; i++)
   {
      parametersGuess[i] = new InitializedList <Handle <Quote> >(4);
      parametersGuess[i][0] = new Handle <Quote>(new SimpleQuote(0.2));
      parametersGuess[i][1] = new Handle <Quote>(new SimpleQuote(0.5));
      parametersGuess[i][2] = new Handle <Quote>(new SimpleQuote(0.4));
      parametersGuess[i][3] = new Handle <Quote>(new SimpleQuote(0.0));
   }
   // No parameter is held fixed during calibration.
   List <bool> isParameterFixed = new InitializedList <bool>(4, false);

   SwaptionVolCube1x volCube1_0, volCube1_1;
   // VolCube created before change of reference date
   volCube1_0 = new SwaptionVolCube1x(vars.atmVolMatrix,
                                      vars.cube.tenors.options,
                                      vars.cube.tenors.swaps,
                                      vars.cube.strikeSpreads,
                                      vars.cube.volSpreadsHandle,
                                      vars.swapIndexBase,
                                      vars.shortSwapIndexBase,
                                      vars.vegaWeighedSmileFit,
                                      parametersGuess,
                                      isParameterFixed,
                                      true);

   // Shift the global evaluation date forward by one business day.
   Date referenceDate = Settings.evaluationDate();
   Settings.setEvaluationDate(vars.conventions.calendar.advance(referenceDate, new Period(1, TimeUnit.Days), vars.conventions.optionBdc));

   // VolCube created after change of reference date
   volCube1_1 = new SwaptionVolCube1x(vars.atmVolMatrix,
                                      vars.cube.tenors.options,
                                      vars.cube.tenors.swaps,
                                      vars.cube.strikeSpreads,
                                      vars.cube.volSpreadsHandle,
                                      vars.swapIndexBase,
                                      vars.shortSwapIndexBase,
                                      vars.vegaWeighedSmileFit,
                                      parametersGuess,
                                      isParameterFixed,
                                      true);

   double dummyStrike = 0.03;
   // Both cube instances must agree to round-off at every node.
   for (int i = 0; i < vars.cube.tenors.options.Count; i++)
   {
      for (int j = 0; j < vars.cube.tenors.swaps.Count; j++)
      {
         for (int k = 0; k < vars.cube.strikeSpreads.Count; k++)
         {
            double v0 = volCube1_0.volatility(vars.cube.tenors.options[i], vars.cube.tenors.swaps[j],
                                              dummyStrike + vars.cube.strikeSpreads[k], false);
            double v1 = volCube1_1.volatility(vars.cube.tenors.options[i], vars.cube.tenors.swaps[j],
                                              dummyStrike + vars.cube.strikeSpreads[k], false);
            if (Math.Abs(v0 - v1) > 1e-14)
            {
               QAssert.Fail(" option tenor = " + vars.cube.tenors.options[i] +
                            " swap tenor = " + vars.cube.tenors.swaps[j] +
                            " strike = " + (dummyStrike + vars.cube.strikeSpreads[k]) +
                            "  v0 = " + (v0) +
                            "  v1 = " + (v1) +
                            "  error = " + Math.Abs(v1 - v0));
            }
         }
      }
   }

   // Restore the original evaluation date before the second round.
   Settings.setEvaluationDate(referenceDate);

   SwaptionVolCube2 volCube2_0, volCube2_1;
   // VolCube created before change of reference date
   volCube2_0 = new SwaptionVolCube2(vars.atmVolMatrix,
                                     vars.cube.tenors.options,
                                     vars.cube.tenors.swaps,
                                     vars.cube.strikeSpreads,
                                     vars.cube.volSpreadsHandle,
                                     vars.swapIndexBase,
                                     vars.shortSwapIndexBase,
                                     vars.vegaWeighedSmileFit);
   Settings.setEvaluationDate(vars.conventions.calendar.advance(referenceDate, new Period(1, TimeUnit.Days), vars.conventions.optionBdc));
   // VolCube created after change of reference date
   volCube2_1 = new SwaptionVolCube2(vars.atmVolMatrix,
                                     vars.cube.tenors.options,
                                     vars.cube.tenors.swaps,
                                     vars.cube.strikeSpreads,
                                     vars.cube.volSpreadsHandle,
                                     vars.swapIndexBase,
                                     vars.shortSwapIndexBase,
                                     vars.vegaWeighedSmileFit);

   // Same node-by-node comparison for the second cube implementation.
   for (int i = 0; i < vars.cube.tenors.options.Count; i++)
   {
      for (int j = 0; j < vars.cube.tenors.swaps.Count; j++)
      {
         for (int k = 0; k < vars.cube.strikeSpreads.Count; k++)
         {
            double v0 = volCube2_0.volatility(vars.cube.tenors.options[i], vars.cube.tenors.swaps[j],
                                              dummyStrike + vars.cube.strikeSpreads[k], false);
            double v1 = volCube2_1.volatility(vars.cube.tenors.options[i], vars.cube.tenors.swaps[j],
                                              dummyStrike + vars.cube.strikeSpreads[k], false);
            if (Math.Abs(v0 - v1) > 1e-14)
            {
               QAssert.Fail(" option tenor = " + vars.cube.tenors.options[i] +
                            " swap tenor = " + vars.cube.tenors.swaps[j] +
                            " strike = " + (dummyStrike + vars.cube.strikeSpreads[k]) +
                            "  v0 = " + (v0) +
                            "  v1 = " + (v1) +
                            "  error = " + Math.Abs(v1 - v0));
            }
         }
      }
   }

   // Leave the global evaluation date as we found it.
   Settings.setEvaluationDate(referenceDate);
}
public void testDefaultProbability()
{
   // Testing default-probability structure...
   //
   // Internal-consistency checks on a flat hazard-rate curve, over 20
   // consecutive one-year periods:
   //  * P(d1, d2) must equal P(0, d2) - P(0, d1);
   //  * single-date and single-time probability queries must agree;
   //  * two-date and two-time probability queries must agree.
   const double tolerance = 1.0e-10;
   const int periods = 20;

   DayCounter dc = new Actual360();
   Calendar cal = new TARGET();
   Handle<Quote> hazardQuote = new Handle<Quote>(new SimpleQuote(0.0100));

   Date today = Settings.evaluationDate();
   FlatHazardRate curve = new FlatHazardRate(today, hazardQuote, dc);

   Date periodEnd = today;
   for (int i = 0; i < periods; i++)
   {
      Date periodStart = periodEnd;
      periodEnd = cal.advance(periodEnd, 1, TimeUnit.Years);

      // P(start, end) must equal the difference of cumulative probabilities.
      double pStart = curve.defaultProbability(periodStart);
      double pEnd = curve.defaultProbability(periodEnd);
      double pBetweenComputed = curve.defaultProbability(periodStart, periodEnd);
      double pBetween = pEnd - pStart;
      if (Math.Abs(pBetween - pBetweenComputed) > tolerance)
      {
         QAssert.Fail("Failed to reproduce probability(d1, d2) " +
                      "for default probability structure\n" +
                      " calculated probability: " + pBetweenComputed + "\n" +
                      " expected probability: " + pBetween);
      }

      // Single-time query must match the single-date query.
      double t2 = dc.yearFraction(today, periodEnd);
      double timeProbability = curve.defaultProbability(t2);
      double dateProbability = curve.defaultProbability(periodEnd);
      if (Math.Abs(timeProbability - dateProbability) > tolerance)
      {
         QAssert.Fail("single-time probability and single-date probability do not match\n" +
                      " time probability: " + timeProbability + "\n" +
                      " date probability: " + dateProbability);
      }

      // Two-time query must match the two-date query.
      double t1 = dc.yearFraction(today, periodStart);
      timeProbability = curve.defaultProbability(t1, t2);
      dateProbability = curve.defaultProbability(periodStart, periodEnd);
      if (Math.Abs(timeProbability - dateProbability) > tolerance)
      {
         QAssert.Fail("double-time probability and double-date probability do not match\n" +
                      " time probability: " + timeProbability + "\n" +
                      " date probability: " + dateProbability);
      }
   }
}
public void testSimpleCovarianceModels()
{
   // Testing simple covariance models
   //
   // Verifies that (a) the pseudo-square-root of the exponential correlation
   // model reproduces the correlation matrix, (b) the covariance proxy's
   // diffusion reproduces its covariance, and (c) the linear-exponential
   // volatility model matches the closed-form (a*(T-t)+d)*exp(-b*(T-t)) + c
   // for rates that have not yet fixed.
   const int size = 10;
   const double tolerance = 1e-14;
   int i;

   LmCorrelationModel corrModel = new LmExponentialCorrelationModel(size, 0.1);

   // corr - pseudoSqrt * pseudoSqrt^T must vanish element-wise.
   Matrix recon = corrModel.correlation(0.0, null) - corrModel.pseudoSqrt(0.0, null) * Matrix.transpose(corrModel.pseudoSqrt(0.0, null));
   for (i = 0; i < size; ++i)
   {
      for (int j = 0; j < size; ++j)
      {
         if (Math.Abs(recon[i, j]) > tolerance)
         {
            QAssert.Fail("Failed to reproduce correlation matrix" +
                         "\n    calculated: " + recon[i, j] +
                         "\n    expected:   " + 0);
         }
      }
   }

   // Semiannual fixing grid: fixingTimes[k] = 0.5 * k.
   List <double> fixingTimes = new InitializedList <double>(size);
   for (i = 0; i < size; ++i)
   {
      fixingTimes[i] = 0.5 * i;
   }

   // Rebonato-style linear-exponential volatility parameters.
   const double a = 0.2;
   const double b = 0.1;
   const double c = 2.1;
   const double d = 0.3;

   LmVolatilityModel volaModel = new LmLinearExponentialVolatilityModel(fixingTimes, a, b, c, d);
   LfmCovarianceProxy covarProxy = new LfmCovarianceProxy(volaModel, corrModel);

   // NOTE(review): process and liborModel are constructed but not used by the
   // assertions below -- presumably only to exercise model wiring; confirm.
   LiborForwardModelProcess process = new LiborForwardModelProcess(size, makeIndex());
   LiborForwardModel liborModel = new LiborForwardModel(process, volaModel, corrModel);

   for (double t = 0; t < 4.6; t += 0.31)
   {
      // cov - diffusion * diffusion^T must vanish element-wise at every t.
      recon = covarProxy.covariance(t, null) - covarProxy.diffusion(t, null) * Matrix.transpose(covarProxy.diffusion(t, null));
      for (int k = 0; k < size; ++k)
      {
         for (int j = 0; j < size; ++j)
         {
            if (Math.Abs(recon[k, j]) > tolerance)
            {
               QAssert.Fail("Failed to reproduce correlation matrix" +
                            "\n    calculated: " + recon[k, j] +
                            "\n    expected:   " + 0);
            }
         }
      }

      Vector volatility = volaModel.volatility(t, null);
      for (int k = 0; k < size; ++k)
      {
         // Since fixingTimes[k] = 0.5*k, the condition k > 2*t means the
         // rate's fixing time lies strictly after t (rate not yet fixed);
         // otherwise the expected volatility is zero.
         double expected = 0;
         if (k > 2 * t)
         {
            double T = fixingTimes[k];
            expected = (a * (T - t) + d) * Math.Exp(-b * (T - t)) + c;
         }
         if (Math.Abs(expected - volatility[k]) > tolerance)
         {
            // (sic: "volatities" typo preserved -- runtime string)
            QAssert.Fail("Failed to reproduce volatities" +
                         "\n    calculated: " + volatility[k] +
                         "\n    expected:   " + expected);
         }
      }
   }
}
public void testCachedMarketValue()
{
   // Testing credit-default swap against cached market values...
   //
   // Prices a 7-year CDS (protection seller, 2.24% running spread) on a
   // hand-built log-linear discount curve and a backward-flat hazard-rate
   // curve bootstrapped from cumulative default probabilities, then checks
   // NPV and fair spread against cached values (Bloomberg figures quoted
   // in the comments for reference).
   using (SavedSettings backup = new SavedSettings())
   {
      Settings.setEvaluationDate(new Date(9, Month.June, 2006));
      Date evalDate = Settings.evaluationDate();
      Calendar calendar = new UnitedStates();

      // Discount-curve pillars: 1W, 1M..6M, 1Y..10Y, 15Y.
      List <Date> discountDates = new List <Date>();
      discountDates.Add(evalDate);
      discountDates.Add(calendar.advance(evalDate, 1, TimeUnit.Weeks, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 1, TimeUnit.Months, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 2, TimeUnit.Months, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 3, TimeUnit.Months, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 6, TimeUnit.Months, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 12, TimeUnit.Months, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 2, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 3, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 4, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 5, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 6, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 7, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 8, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 9, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 10, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      discountDates.Add(calendar.advance(evalDate, 15, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));

      // Cached discount factors, one per pillar above.
      List <double> dfs = new List <double>();
      dfs.Add(1.0);
      dfs.Add(0.9990151375768731);
      dfs.Add(0.99570502636871183);
      dfs.Add(0.99118260474528685);
      dfs.Add(0.98661167950906203);
      dfs.Add(0.9732592953359388);
      dfs.Add(0.94724424481038083);
      dfs.Add(0.89844996737120875);
      dfs.Add(0.85216647839921411);
      dfs.Add(0.80775477692556874);
      dfs.Add(0.76517289234200347);
      dfs.Add(0.72401019553182933);
      dfs.Add(0.68503909569219212);
      dfs.Add(0.64797499814013748);
      dfs.Add(0.61263171936255534);
      dfs.Add(0.5791942350748791);
      dfs.Add(0.43518868769953606);

      DayCounter curveDayCounter = new Actual360();
      RelinkableHandle <YieldTermStructure> discountCurve = new RelinkableHandle <YieldTermStructure>();
      discountCurve.linkTo(new InterpolatedDiscountCurve <LogLinear>(discountDates, dfs, curveDayCounter, null, null, null, new LogLinear()));

      // Credit-curve pillars: 6M, 1Y..5Y, 7Y, 10Y.
      DayCounter dayCounter = new Thirty360();
      List <Date> dates = new List <Date>();
      dates.Add(evalDate);
      dates.Add(calendar.advance(evalDate, 6, TimeUnit.Months, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 1, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 2, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 3, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 4, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 5, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 7, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));
      dates.Add(calendar.advance(evalDate, 10, TimeUnit.Years, BusinessDayConvention.ModifiedFollowing));

      // Cumulative default probabilities at the pillars above.
      List <double> defaultProbabilities = new List <double>();
      defaultProbabilities.Add(0.0000);
      defaultProbabilities.Add(0.0047);
      defaultProbabilities.Add(0.0093);
      defaultProbabilities.Add(0.0286);
      defaultProbabilities.Add(0.0619);
      defaultProbabilities.Add(0.0953);
      defaultProbabilities.Add(0.1508);
      defaultProbabilities.Add(0.2288);
      defaultProbabilities.Add(0.3666);

      // Convert cumulative default probabilities to piecewise-constant
      // hazard rates: h = ln(S1/S2) / (t2 - t1) with S = survival probability.
      List <double> hazardRates = new List <double>();
      hazardRates.Add(0.0);
      for (int i = 1; i < dates.Count; ++i)
      {
         double t1 = dayCounter.yearFraction(dates[0], dates[i - 1]);
         double t2 = dayCounter.yearFraction(dates[0], dates[i]);
         double S1 = 1.0 - defaultProbabilities[i - 1];
         double S2 = 1.0 - defaultProbabilities[i];
         hazardRates.Add(Math.Log(S1 / S2) / (t2 - t1));
      }

      RelinkableHandle <DefaultProbabilityTermStructure> piecewiseFlatHazardRate = new RelinkableHandle <DefaultProbabilityTermStructure>();
      piecewiseFlatHazardRate.linkTo(new InterpolatedHazardRateCurve <BackwardFlat>(dates, hazardRates, new Thirty360()));

      // Testing credit default swap

      // Build the schedule
      Date issueDate = new Date(20, Month.March, 2006);
      Date maturity = new Date(20, Month.June, 2013);
      Frequency cdsFrequency = Frequency.Semiannual;
      BusinessDayConvention cdsConvention = BusinessDayConvention.ModifiedFollowing;
      Schedule schedule = new Schedule(issueDate, maturity, new Period(cdsFrequency), calendar, cdsConvention, cdsConvention, DateGeneration.Rule.Forward, false);

      // Build the CDS
      double recoveryRate = 0.25;
      double fixedRate = 0.0224;
      DayCounter dayCount = new Actual360();
      double cdsNotional = 100.0;

      CreditDefaultSwap cds = new CreditDefaultSwap(Protection.Side.Seller, cdsNotional, fixedRate, schedule, cdsConvention, dayCount, true, true);
      cds.setPricingEngine(new MidPointCdsEngine(piecewiseFlatHazardRate, recoveryRate, discountCurve));

      double calculatedNpv = cds.NPV();
      double calculatedFairRate = cds.fairSpread();

      double npv = -1.364048777;        // from Bloomberg we have 98.15598868 - 100.00;
      double fairRate = 0.0248429452;   // from Bloomberg we have 0.0258378;
      double tolerance = 1e-9;

      if (Math.Abs(npv - calculatedNpv) > tolerance)
      {
         QAssert.Fail("Failed to reproduce the npv for the given credit-default swap\n" +
                      " computed NPV: " + calculatedNpv + "\n" +
                      " Given NPV: " + npv);
      }
      if (Math.Abs(fairRate - calculatedFairRate) > tolerance)
      {
         QAssert.Fail("Failed to reproduce the fair rate for the given credit-default swap\n" +
                      " computed fair rate: " + calculatedFairRate + "\n" +
                      " Given fair rate: " + fairRate);
      }
   }
}
public void testCachedValue()
{
   // Testing credit-default swap against cached values...
   //
   // Prices a 10-year seller CDS on flat hazard-rate and flat forward
   // curves, comparing NPV and fair spread to cached values under three
   // engines: mid-point, integral (1-day step), integral (1-week step).
   using (SavedSettings backup = new SavedSettings())
   {
      // Initialize curves
      Settings.setEvaluationDate(new Date(9, Month.June, 2006));
      Date today = Settings.evaluationDate();
      Calendar calendar = new TARGET();

      Handle <Quote> hazardRate = new Handle <Quote>(new SimpleQuote(0.01234));
      RelinkableHandle <DefaultProbabilityTermStructure> probabilityCurve = new RelinkableHandle <DefaultProbabilityTermStructure>();
      probabilityCurve.linkTo(new FlatHazardRate(0, calendar, hazardRate, new Actual360()));

      RelinkableHandle <YieldTermStructure> discountCurve = new RelinkableHandle <YieldTermStructure>();
      discountCurve.linkTo(new FlatForward(today, 0.06, new Actual360()));

      // Build the schedule: seasoned trade, issued one year ago, 10Y tenor.
      Date issueDate = calendar.advance(today, -1, TimeUnit.Years);
      Date maturity = calendar.advance(issueDate, 10, TimeUnit.Years);
      Frequency frequency = Frequency.Semiannual;
      BusinessDayConvention convention = BusinessDayConvention.ModifiedFollowing;
      Schedule schedule = new Schedule(issueDate, maturity, new Period(frequency), calendar, convention, convention, DateGeneration.Rule.Forward, false);

      // Build the CDS
      double fixedRate = 0.0120;
      DayCounter dayCount = new Actual360();
      double notional = 10000.0;
      double recoveryRate = 0.4;

      CreditDefaultSwap cds = new CreditDefaultSwap(Protection.Side.Seller, notional, fixedRate, schedule, convention, dayCount, true, true);
      cds.setPricingEngine(new MidPointCdsEngine(probabilityCurve, recoveryRate, discountCurve));

      // Cached reference values for the mid-point engine.
      double npv = 295.0153398;
      double fairRate = 0.007517539081;

      double calculatedNpv = cds.NPV();
      double calculatedFairRate = cds.fairSpread();
      double tolerance = 1.0e-7;

      if (Math.Abs(calculatedNpv - npv) > tolerance)
      {
         QAssert.Fail("Failed to reproduce NPV with mid-point engine\n" +
                      "    calculated NPV: " + calculatedNpv + "\n" +
                      "    expected NPV:   " + npv);
      }
      if (Math.Abs(calculatedFairRate - fairRate) > tolerance)
      {
         QAssert.Fail("Failed to reproduce fair rate with mid-point engine\n" +
                      "    calculated fair rate: " + calculatedFairRate + "\n" +
                      "    expected fair rate:   " + fairRate);
      }

      // Integral engine with a 1-day step; NPV tolerance is loosened and
      // scaled by the notional.
      cds.setPricingEngine(new IntegralCdsEngine(new Period(1, TimeUnit.Days), probabilityCurve, recoveryRate, discountCurve));

      calculatedNpv = cds.NPV();
      calculatedFairRate = cds.fairSpread();
      tolerance = 1.0e-5;

      if (Math.Abs(calculatedNpv - npv) > notional * tolerance * 10)
      {
         QAssert.Fail("Failed to reproduce NPV with integral engine " +
                      "(step = 1 day)\n" +
                      "    calculated NPV: " + calculatedNpv + "\n" +
                      "    expected NPV:   " + npv);
      }
      if (Math.Abs(calculatedFairRate - fairRate) > tolerance)
      {
         QAssert.Fail("Failed to reproduce fair rate with integral engine " +
                      "(step = 1 day)\n" +
                      "    calculated fair rate: " + calculatedFairRate + "\n" +
                      "    expected fair rate:   " + fairRate);
      }

      // Integral engine with a coarser 1-week step; same tolerances.
      cds.setPricingEngine(new IntegralCdsEngine(new Period(1, TimeUnit.Weeks), probabilityCurve, recoveryRate, discountCurve));

      calculatedNpv = cds.NPV();
      calculatedFairRate = cds.fairSpread();
      tolerance = 1.0e-5;

      if (Math.Abs(calculatedNpv - npv) > notional * tolerance * 10)
      {
         QAssert.Fail("Failed to reproduce NPV with integral engine " +
                      "(step = 1 week)\n" +
                      "    calculated NPV: " + calculatedNpv + "\n" +
                      "    expected NPV:   " + npv);
      }
      if (Math.Abs(calculatedFairRate - fairRate) > tolerance)
      {
         QAssert.Fail("Failed to reproduce fair rate with integral engine " +
                      "(step = 1 week)\n" +
                      "    calculated fair rate: " + calculatedFairRate + "\n" +
                      "    expected fair rate:   " + fairRate);
      }
   }
}
public void testThirty360_BondBasis()
{
   // Testing thirty/360 day counter (Bond Basis)
   // http://www.isda.org/c_and_a/docs/30-360-2006ISDADefs.xls
   // Source: 2006 ISDA Definitions, Sec. 4.16 (f)
   // 30/360 (or Bond Basis)
   //
   // Checks dayCount() for every (start, end) pair against the day counts
   // published in the ISDA worksheet; the three groups below mirror the
   // worksheet's three example sets.
   DayCounter dayCounter = new Thirty360(Thirty360.Thirty360Convention.BondBasis);
   List <Date> testStartDates = new List <Date>();
   List <Date> testEndDates = new List <Date>();
   int calculated;

   // ISDA - Example 1: End dates do not involve the last day of February
   testStartDates.Add(new Date(20, Month.August, 2006)); testEndDates.Add(new Date(20, Month.February, 2007));
   testStartDates.Add(new Date(20, Month.February, 2007)); testEndDates.Add(new Date(20, Month.August, 2007));
   testStartDates.Add(new Date(20, Month.August, 2007)); testEndDates.Add(new Date(20, Month.February, 2008));
   testStartDates.Add(new Date(20, Month.February, 2008)); testEndDates.Add(new Date(20, Month.August, 2008));
   testStartDates.Add(new Date(20, Month.August, 2008)); testEndDates.Add(new Date(20, Month.February, 2009));
   testStartDates.Add(new Date(20, Month.February, 2009)); testEndDates.Add(new Date(20, Month.August, 2009));

   // ISDA - Example 2: End dates include some end-February dates
   testStartDates.Add(new Date(31, Month.August, 2006)); testEndDates.Add(new Date(28, Month.February, 2007));
   testStartDates.Add(new Date(28, Month.February, 2007)); testEndDates.Add(new Date(31, Month.August, 2007));
   testStartDates.Add(new Date(31, Month.August, 2007)); testEndDates.Add(new Date(29, Month.February, 2008));
   testStartDates.Add(new Date(29, Month.February, 2008)); testEndDates.Add(new Date(31, Month.August, 2008));
   testStartDates.Add(new Date(31, Month.August, 2008)); testEndDates.Add(new Date(28, Month.February, 2009));
   testStartDates.Add(new Date(28, Month.February, 2009)); testEndDates.Add(new Date(31, Month.August, 2009));

   //// ISDA - Example 3: Miscellaneous calculations
   testStartDates.Add(new Date(31, Month.January, 2006)); testEndDates.Add(new Date(28, Month.February, 2006));
   testStartDates.Add(new Date(30, Month.January, 2006)); testEndDates.Add(new Date(28, Month.February, 2006));
   testStartDates.Add(new Date(28, Month.February, 2006)); testEndDates.Add(new Date(3, Month.March, 2006));
   testStartDates.Add(new Date(14, Month.February, 2006)); testEndDates.Add(new Date(28, Month.February, 2006));
   testStartDates.Add(new Date(30, Month.September, 2006)); testEndDates.Add(new Date(31, Month.October, 2006));
   testStartDates.Add(new Date(31, Month.October, 2006)); testEndDates.Add(new Date(28, Month.November, 2006));
   testStartDates.Add(new Date(31, Month.August, 2007)); testEndDates.Add(new Date(28, Month.February, 2008));
   testStartDates.Add(new Date(28, Month.February, 2008)); testEndDates.Add(new Date(28, Month.August, 2008));
   testStartDates.Add(new Date(28, Month.February, 2008)); testEndDates.Add(new Date(30, Month.August, 2008));
   testStartDates.Add(new Date(28, Month.February, 2008)); testEndDates.Add(new Date(31, Month.August, 2008));
   testStartDates.Add(new Date(26, Month.February, 2007)); testEndDates.Add(new Date(28, Month.February, 2008)));
   testStartDates.Add(new Date(26, Month.February, 2007)); testEndDates.Add(new Date(29, Month.February, 2008));
   testStartDates.Add(new Date(29, Month.February, 2008)); testEndDates.Add(new Date(28, Month.February, 2009));
   testStartDates.Add(new Date(28, Month.February, 2008)); testEndDates.Add(new Date(30, Month.March, 2008));
   testStartDates.Add(new Date(28, Month.February, 2008)); testEndDates.Add(new Date(31, Month.March, 2008));

   // Expected day counts, one per (start, end) pair above, in order.
   int[] expected = { 180, 180, 180, 180, 180, 180,
                      178, 183, 179, 182, 178, 183,
                      28, 28, 5, 14, 30, 28,
                      178, 180, 182, 183, 362, 363,
                      359, 32, 33
                    };

   for (int i = 0; i < testStartDates.Count; i++)
   {
      calculated = dayCounter.dayCount(testStartDates[i], testEndDates[i]);
      if (calculated != expected[i])
      {
         QAssert.Fail("from " + testStartDates[i] + " to " + testEndDates[i] + ":\n" +
                      " calculated: " + calculated + "\n" +
                      " expected: " + expected[i]);
      }
   }
}
public void testActualActual()
{
   // Testing actual/actual day counters
   //
   // Checks yearFraction() for the three Act/Act conventions (ISDA, ISMA,
   // AFB) against the worked examples in the ISDA "EMU and Market
   // Conventions" memo (mktc1198.pdf). ISMA cases carry an explicit
   // reference period; ISDA and AFB cases do not.
   SingleCase[] testCases =
   {
      // first example
      new SingleCase(ActualActual.Convention.ISDA, new Date(1, Month.November, 2003), new Date(1, Month.May, 2004), 0.497724380567),
      new SingleCase(ActualActual.Convention.ISMA, new Date(1, Month.November, 2003), new Date(1, Month.May, 2004), new Date(1, Month.November, 2003), new Date(1, Month.May, 2004), 0.500000000000),
      new SingleCase(ActualActual.Convention.AFB, new Date(1, Month.November, 2003), new Date(1, Month.May, 2004), 0.497267759563),
      // short first calculation period (first period)
      new SingleCase(ActualActual.Convention.ISDA, new Date(1, Month.February, 1999), new Date(1, Month.July, 1999), 0.410958904110),
      new SingleCase(ActualActual.Convention.ISMA, new Date(1, Month.February, 1999), new Date(1, Month.July, 1999), new Date(1, Month.July, 1998), new Date(1, Month.July, 1999), 0.410958904110),
      new SingleCase(ActualActual.Convention.AFB, new Date(1, Month.February, 1999), new Date(1, Month.July, 1999), 0.410958904110),
      // short first calculation period (second period)
      new SingleCase(ActualActual.Convention.ISDA, new Date(1, Month.July, 1999), new Date(1, Month.July, 2000), 1.001377348600),
      new SingleCase(ActualActual.Convention.ISMA, new Date(1, Month.July, 1999), new Date(1, Month.July, 2000), new Date(1, Month.July, 1999), new Date(1, Month.July, 2000), 1.000000000000),
      new SingleCase(ActualActual.Convention.AFB, new Date(1, Month.July, 1999), new Date(1, Month.July, 2000), 1.000000000000),
      // long first calculation period (first period)
      new SingleCase(ActualActual.Convention.ISDA, new Date(15, Month.August, 2002), new Date(15, Month.July, 2003), 0.915068493151),
      new SingleCase(ActualActual.Convention.ISMA, new Date(15, Month.August, 2002), new Date(15, Month.July, 2003), new Date(15, Month.January, 2003), new Date(15, Month.July, 2003), 0.915760869565),
      new SingleCase(ActualActual.Convention.AFB, new Date(15, Month.August, 2002), new Date(15, Month.July, 2003), 0.915068493151),
      // long first calculation period (second period)
      /* Warning: the ISDA case is in disagreement with mktc1198.pdf */
      new SingleCase(ActualActual.Convention.ISDA, new Date(15, Month.July, 2003), new Date(15, Month.January, 2004), 0.504004790778),
      new SingleCase(ActualActual.Convention.ISMA, new Date(15, Month.July, 2003), new Date(15, Month.January, 2004), new Date(15, Month.July, 2003), new Date(15, Month.January, 2004), 0.500000000000),
      new SingleCase(ActualActual.Convention.AFB, new Date(15, Month.July, 2003), new Date(15, Month.January, 2004), 0.504109589041),
      // short final calculation period (penultimate period)
      new SingleCase(ActualActual.Convention.ISDA, new Date(30, Month.July, 1999), new Date(30, Month.January, 2000), 0.503892506924),
      new SingleCase(ActualActual.Convention.ISMA, new Date(30, Month.July, 1999), new Date(30, Month.January, 2000), new Date(30, Month.July, 1999), new Date(30, Month.January, 2000), 0.500000000000),
      new SingleCase(ActualActual.Convention.AFB, new Date(30, Month.July, 1999), new Date(30, Month.January, 2000), 0.504109589041),
      // short final calculation period (final period)
      new SingleCase(ActualActual.Convention.ISDA, new Date(30, Month.January, 2000), new Date(30, Month.June, 2000), 0.415300546448),
      new SingleCase(ActualActual.Convention.ISMA, new Date(30, Month.January, 2000), new Date(30, Month.June, 2000), new Date(30, Month.January, 2000), new Date(30, Month.July, 2000), 0.417582417582),
      new SingleCase(ActualActual.Convention.AFB, new Date(30, Month.January, 2000), new Date(30, Month.June, 2000), 0.41530054644)
   };

   int n = testCases.Length;
   for (int i = 0; i < n; i++)
   {
      ActualActual dayCounter = new ActualActual(testCases[i]._convention);
      Date d1 = testCases[i]._start;
      Date d2 = testCases[i]._end;
      // Reference period (null for non-ISMA cases).
      Date rd1 = testCases[i]._refStart;
      Date rd2 = testCases[i]._refEnd;
      double calculated = dayCounter.yearFraction(d1, d2, rd1, rd2);
      if (Math.Abs(calculated - testCases[i]._result) > 1.0e-10)
      {
         QAssert.Fail(dayCounter.name() + "period: " + d1 + " to " + d2 +
                      " calculated: " + calculated +
                      " expected: " + testCases[i]._result);
      }
   }
}
public void testASXDates()
{
   // Testing ASX dates
   //
   // For every calendar day in the testable range, verifies that the
   // ASX date calculator produces a strictly later, valid ASX date that
   // does not pass the next main-cycle date, that code/date round-trip,
   // and that all 120 two-character codes resolve to non-past dates.
   String[] ASXcodes =
   {
      "F0", "G0", "H0", "J0", "K0", "M0", "N0", "Q0", "U0", "V0", "X0", "Z0",
      "F1", "G1", "H1", "J1", "K1", "M1", "N1", "Q1", "U1", "V1", "X1", "Z1",
      "F2", "G2", "H2", "J2", "K2", "M2", "N2", "Q2", "U2", "V2", "X2", "Z2",
      "F3", "G3", "H3", "J3", "K3", "M3", "N3", "Q3", "U3", "V3", "X3", "Z3",
      "F4", "G4", "H4", "J4", "K4", "M4", "N4", "Q4", "U4", "V4", "X4", "Z4",
      "F5", "G5", "H5", "J5", "K5", "M5", "N5", "Q5", "U5", "V5", "X5", "Z5",
      "F6", "G6", "H6", "J6", "K6", "M6", "N6", "Q6", "U6", "V6", "X6", "Z6",
      "F7", "G7", "H7", "J7", "K7", "M7", "N7", "Q7", "U7", "V7", "X7", "Z7",
      "F8", "G8", "H8", "J8", "K8", "M8", "N8", "Q8", "U8", "V8", "X8", "Z8",
      "F9", "G9", "H9", "J9", "K9", "M9", "N9", "Q9", "U9", "V9", "X9", "Z9"
   };

   // 10 years of futures must not exceed Date::maxDate
   Date lastTestable = Date.maxDate() - new Period(121, TimeUnit.Months);

   for (Date day = Date.minDate(); day <= lastTestable; day = day + 1)
   {
      Date asxDate = ASX.nextDate(day, false);

      // the computed date must lie strictly after the reference day
      if (asxDate <= day)
      {
         QAssert.Fail(asxDate.weekday() + " " + asxDate + " is not greater than " + day.weekday() + " " + day);
      }

      // and must itself be a valid ASX date
      if (!ASX.isASXdate(asxDate, false))
      {
         QAssert.Fail(asxDate.weekday() + " " + asxDate + " is not an ASX date (calculated from " + day.weekday() + " " + day + ")");
      }

      // and must not pass the next ASX date in the main cycle
      if (asxDate > ASX.nextDate(day, true))
      {
         QAssert.Fail(asxDate.weekday() + " " + asxDate + " is not less than or equal to the next future in the main cycle " + ASX.nextDate(day, true));
      }

      // date -> code -> date must round-trip
      if (ASX.date(ASX.code(asxDate), day) != asxDate)
      {
         QAssert.Fail(ASX.code(asxDate) + " at calendar day " + day + " is not the ASX code matching " + asxDate);
      }

      // every one of the 120 codes must resolve to a non-past date
      for (int i = 0; i < 120; ++i)
      {
         if (ASX.date(ASXcodes[i], day) < day)
         {
            QAssert.Fail(ASX.date(ASXcodes[i], day) + " is wrong for " + ASXcodes[i] + " at reference date " + day);
         }
      }
   }
}
public void testECBDates()
{
   // Testing ECB dates
   //
   // Checks the hard-coded ECB maintenance-period dates: membership tests,
   // nextDate() navigation from the day before each known date and from the
   // previous known date, and that dates can be removed and re-added.
   List <Date> knownDates = ECB.knownDates();
   if (knownDates.empty())
   {
      QAssert.Fail("Empty EBC date vector");
   }

   // nextDates(minDate) must return the whole known set.
   int returned = ECB.nextDates(Date.minDate()).Count;
   if (returned != knownDates.Count)
   {
      QAssert.Fail("NextDates(minDate) returns " + returned + " instead of " + knownDates.Count + " dates");
   }

   Date previous = Date.minDate();
   foreach (Date current in knownDates)
   {
      if (!ECB.isECBdate(current))
      {
         QAssert.Fail(current + " fails isECBdate check");
      }

      // the day immediately before an ECB date must not be one
      Date dayBefore = current - 1;
      if (ECB.isECBdate(dayBefore))
      {
         QAssert.Fail(dayBefore + " fails isECBdate check");
      }

      // nextDate from the day before must land on the ECB date itself
      if (ECB.nextDate(dayBefore) != current)
      {
         QAssert.Fail("Next EBC date following " + dayBefore + " must be " + current);
      }

      // nextDate from the previous known date must also land on it
      if (ECB.nextDate(previous) != current)
      {
         QAssert.Fail("Next EBC date following " + previous + " must be " + current);
      }

      previous = current;
   }

   // removal and re-insertion must be reflected by isECBdate
   Date sample = knownDates.First();
   ECB.removeDate(sample);
   if (ECB.isECBdate(sample))
   {
      QAssert.Fail("Unable to remove an EBC date");
   }
   ECB.addDate(sample);
   if (!ECB.isECBdate(sample))
   {
      QAssert.Fail("Unable to add an EBC date");
   }
}
public void testIMMDates()
{
   // Testing IMM dates...
   //
   // For every calendar day in the testable range, verifies that the IMM
   // date calculator produces a strictly later, valid IMM date that does
   // not pass the next main-cycle date, that code/date round-trip, and
   // that all 120 two-character codes resolve to non-past dates.
   string[] IMMcodes = new string[]
   {
      "F0", "G0", "H0", "J0", "K0", "M0", "N0", "Q0", "U0", "V0", "X0", "Z0",
      "F1", "G1", "H1", "J1", "K1", "M1", "N1", "Q1", "U1", "V1", "X1", "Z1",
      "F2", "G2", "H2", "J2", "K2", "M2", "N2", "Q2", "U2", "V2", "X2", "Z2",
      "F3", "G3", "H3", "J3", "K3", "M3", "N3", "Q3", "U3", "V3", "X3", "Z3",
      "F4", "G4", "H4", "J4", "K4", "M4", "N4", "Q4", "U4", "V4", "X4", "Z4",
      "F5", "G5", "H5", "J5", "K5", "M5", "N5", "Q5", "U5", "V5", "X5", "Z5",
      "F6", "G6", "H6", "J6", "K6", "M6", "N6", "Q6", "U6", "V6", "X6", "Z6",
      "F7", "G7", "H7", "J7", "K7", "M7", "N7", "Q7", "U7", "V7", "X7", "Z7",
      "F8", "G8", "H8", "J8", "K8", "M8", "N8", "Q8", "U8", "V8", "X8", "Z8",
      "F9", "G9", "H9", "J9", "K9", "M9", "N9", "Q9", "U9", "V9", "X9", "Z9"
   };

   Date counter = Date.minDate();
   // 10 years of futures must not exceed Date::maxDate
   Date last = Date.maxDate() - new Period(121, TimeUnit.Months);
   Date imm;

   while (counter <= last)
   {
      imm = IMM.nextDate(counter, false);

      // check that imm is greater than counter
      if (imm <= counter)
      {
         QAssert.Fail(imm.DayOfWeek + " " + imm + " is not greater than " + counter.DayOfWeek + " " + counter);
      }

      // check that imm is an IMM date
      if (!IMM.isIMMdate(imm, false))
      {
         QAssert.Fail(imm.DayOfWeek + " " + imm + " is not an IMM date (calculated from " + counter.DayOfWeek + " " + counter + ")");
      }

      // check that imm is <= to the next IMM date in the main cycle
      if (imm > IMM.nextDate(counter, true))
      {
         QAssert.Fail(imm.DayOfWeek + " " + imm + " is not less than or equal to the next future in the main cycle " + IMM.nextDate(counter, true));
      }

      // check that for every date IMMdate is the inverse of IMMcode
      if (IMM.date(IMM.code(imm), counter) != imm)
      {
         QAssert.Fail(IMM.code(imm) + " at calendar day " + counter + " is not the IMM code matching " + imm);
      }

      // check that for every date the 120 IMM codes refer to future dates
      // (fixed: the loop previously stopped at 40, silently skipping two
      // thirds of the declared codes; the analogous ASX test checks all 120)
      for (int i = 0; i < 120; ++i)
      {
         if (IMM.date(IMMcodes[i], counter) < counter)
         {
            QAssert.Fail(IMM.date(IMMcodes[i], counter) + " is wrong for " + IMMcodes[i] + " at reference date " + counter);
         }
      }

      counter = counter + 1;
   }
}
public void testConsistency()
{
   // Testing dates for consistency
   //
   // Walks every representable serial number and verifies: serial-number
   // round-trip, day-of-year increments (including leap-year rollover),
   // day/month/year increments, valid month and day-of-month ranges,
   // weekday cycling, and that the (d, m, y) constructor reproduces the
   // same serial number.
   int minDate = Date.minDate().serialNumber() + 1,
       maxDate = Date.maxDate().serialNumber();

   // Seed the "previous day" fields from the day before the first tested one.
   int dyold = new Date(minDate - 1).DayOfYear,
       dold = new Date(minDate - 1).Day,
       mold = new Date(minDate - 1).Month,
       yold = new Date(minDate - 1).Year,
       wdold = new Date(minDate - 1).weekday();

   for (int i = minDate; i <= maxDate; i++)
   {
      Date t = new Date(i);
      int serial = t.serialNumber();

      // check serial number consistency
      if (serial != i)
      {
         QAssert.Fail("inconsistent serial number:\n" +
                      " original: " + i + "\n" +
                      " date: " + t + "\n" +
                      " serial number: " + serial);
      }

      int dy = t.DayOfYear,
          d = t.Day,
          m = t.Month,
          y = t.Year,
          wd = t.weekday();

      // check if skipping any date: day-of-year either increments by one or
      // wraps to 1 after 365 (non-leap) / 366 (leap) days.
      if (!((dy == dyold + 1) ||
            (dy == 1 && dyold == 365 && !Date.IsLeapYear(yold)) ||
            (dy == 1 && dyold == 366 && Date.IsLeapYear(yold))))
      {
         QAssert.Fail("wrong day of year increment: \n" +
                      " date: " + t + "\n" +
                      " day of year: " + dy + "\n" +
                      " previous:    " + dyold);
      }
      dyold = dy;

      // day/month/year must advance by exactly one day: same month, next
      // month (day resets to 1), or next year (day and month reset to 1).
      if (!((d == dold + 1 && m == mold && y == yold) ||
            (d == 1 && m == mold + 1 && y == yold) ||
            (d == 1 && m == 1 && y == yold + 1)))
      {
         QAssert.Fail("wrong day,month,year increment: \n" +
                      " date: " + t + "\n" +
                      " day,month,year: " + d + "," + m + "," + y + "\n" +
                      " previous:       " + dold + "," + mold + "," + yold);
      }
      dold = d; mold = m; yold = y;

      // check month definition
      if (m < 1 || m > 12)
      {
         QAssert.Fail("invalid month: \n" +
                      " date:  " + t + "\n" +
                      " month: " + m);
      }

      // check day definition
      if (d < 1)
      {
         QAssert.Fail("invalid day of month: \n" +
                      " date:  " + t + "\n" +
                      " day: " + d);
      }
      // day must fit in the month (29 February only in leap years)
      if (!((m == 1 && d <= 31) ||
            (m == 2 && d <= 28) ||
            (m == 2 && d == 29 && Date.IsLeapYear(y)) ||
            (m == 3 && d <= 31) ||
            (m == 4 && d <= 30) ||
            (m == 5 && d <= 31) ||
            (m == 6 && d <= 30) ||
            (m == 7 && d <= 31) ||
            (m == 8 && d <= 31) ||
            (m == 9 && d <= 30) ||
            (m == 10 && d <= 31) ||
            (m == 11 && d <= 30) ||
            (m == 12 && d <= 31)))
      {
         QAssert.Fail("invalid day of month: \n" +
                      " date:  " + t + "\n" +
                      " day: " + d);
      }

      // check weekday definition: increments by one, wrapping 7 -> 1
      if (!((wd == wdold + 1) ||
            (wd == 1 && wdold == 7)))
      {
         QAssert.Fail("invalid weekday: \n" +
                      " date: " + t + "\n" +
                      " weekday:  " + wd + "\n" +
                      " previous: " + wdold);
      }
      wdold = wd;

      // create the same date with a different constructor
      Date s = new Date(d, m, y);
      // check serial number consistency
      serial = s.serialNumber();
      if (serial != i)
      {
         QAssert.Fail("inconsistent serial number:\n" +
                      " date: " + t + "\n" +
                      " serial number: " + i + "\n" +
                      " cloned date: " + s + "\n" +
                      " serial number: " + serial);
      }
   }
}
public void testJpyLibor()
{
   // Bootstrap a discount curve over JPY LIBOR swaps and verify that
   // repricing the quoted swaps on the resulting curve recovers the
   // input rates within tolerance.
   CommonVars vars = new CommonVars();

   vars.today = new Date(4, Month.October, 2007);
   Settings.Instance.setEvaluationDate(vars.today);
   vars.calendar = new Japan();
   vars.settlement = vars.calendar.advance(vars.today, vars.settlementDays, TimeUnit.Days);

   // market quotes
   vars.rates = new InitializedList<SimpleQuote>(vars.swaps);
   for (int i = 0; i < vars.swaps; i++)
   {
      vars.rates[i] = new SimpleQuote(vars.swapData[i].rate / 100);
   }

   // swap-rate helpers on 6M JPY LIBOR
   vars.instruments = new InitializedList<RateHelper>(vars.swaps);
   IborIndex index = new JPYLibor(new Period(6, TimeUnit.Months));
   for (int i = 0; i < vars.swaps; i++)
   {
      Handle<Quote> quoteHandle = new Handle<Quote>(vars.rates[i]);
      vars.instruments[i] = new SwapRateHelper(quoteHandle,
                                               new Period(vars.swapData[i].n, vars.swapData[i].units),
                                               vars.calendar, vars.fixedLegFrequency,
                                               vars.fixedLegConvention, vars.fixedLegDayCounter,
                                               index);
   }

   vars.termStructure = new PiecewiseYieldCurve<Discount, LogLinear>(
      vars.settlement, vars.instruments, new Actual360(),
      new List<Handle<Quote>>(), new List<Date>(), 1.0e-12);

   RelinkableHandle<YieldTermStructure> curveHandle = new RelinkableHandle<YieldTermStructure>();
   curveHandle.linkTo(vars.termStructure);

   // reprice each quoted swap on the bootstrapped curve and compare
   IborIndex jpylibor6m = new JPYLibor(new Period(6, TimeUnit.Months), curveHandle);
   for (int i = 0; i < vars.swaps; i++)
   {
      Period tenor = new Period(vars.swapData[i].n, vars.swapData[i].units);
      VanillaSwap swap = new MakeVanillaSwap(tenor, jpylibor6m, 0.0)
                         .withEffectiveDate(vars.settlement)
                         .withFixedLegDayCount(vars.fixedLegDayCounter)
                         .withFixedLegTenor(new Period(vars.fixedLegFrequency))
                         .withFixedLegConvention(vars.fixedLegConvention)
                         .withFixedLegTerminationDateConvention(vars.fixedLegConvention)
                         .withFixedLegCalendar(vars.calendar)
                         .withFloatingLegCalendar(vars.calendar)
                         .value();

      double expectedRate = vars.swapData[i].rate / 100,
             estimatedRate = swap.fairRate();
      double error = Math.Abs(expectedRate - estimatedRate);
      double tolerance = 1.0e-9;
      if (error > tolerance)
      {
         QAssert.Fail(vars.swapData[i].n + " year(s) swap:\n" +
                      "\n estimated rate: " + (estimatedRate) +
                      "\n expected rate: " + (expectedRate) +
                      "\n error: " + (error) +
                      "\n tolerance: " + (tolerance));
      }
   }
}
public void testLiborFixing()
{
   // Build a swap curve, price the quoted swaps before and after
   // publishing today's 6M Euribor fixing, and check that the curve's
   // observers are notified when the fixing is added.
   CommonVars vars = new CommonVars();

   var helpers = new InitializedList<RateHelper>();
   IborIndex euribor6m = new Euribor6M();
   for (int i = 0; i < vars.swaps; i++)
   {
      Handle<Quote> quoteHandle = new Handle<Quote>(vars.rates[i + vars.deposits]);
      helpers.Add(new SwapRateHelper(quoteHandle,
                                     new Period(vars.swapData[i].n, vars.swapData[i].units),
                                     vars.calendar, vars.fixedLegFrequency,
                                     vars.fixedLegConvention, vars.fixedLegDayCounter,
                                     euribor6m));
   }

   vars.termStructure = new PiecewiseYieldCurve<Discount, LogLinear>(vars.settlement, helpers, new Actual360());
   Handle<YieldTermStructure> curveHandle = new Handle<YieldTermStructure>(vars.termStructure);
   IborIndex index = new Euribor6M(curveHandle);

   // before the fixing is published, fair rates must match the quotes
   for (int i = 0; i < vars.swaps; i++)
   {
      Period tenor = new Period(vars.swapData[i].n, vars.swapData[i].units);
      VanillaSwap swap = new MakeVanillaSwap(tenor, index, 0.0)
                         .withEffectiveDate(vars.settlement)
                         .withFixedLegDayCount(vars.fixedLegDayCounter)
                         .withFixedLegTenor(new Period(vars.fixedLegFrequency))
                         .withFixedLegConvention(vars.fixedLegConvention)
                         .withFixedLegTerminationDateConvention(vars.fixedLegConvention)
                         .value();
      double expectedRate = vars.swapData[i].rate / 100,
             estimatedRate = swap.fairRate();
      double tolerance = 1.0e-9;
      if (Math.Abs(expectedRate - estimatedRate) > tolerance)
      {
         QAssert.Fail("before LIBOR fixing:\n" + vars.swapData[i].n + " year(s) swap:\n" +
                      " estimated rate: " + (estimatedRate) + "\n" +
                      " expected rate: " + (expectedRate));
      }
   }

   // adding today's fixing must notify the curve's observers
   Flag flag = new Flag();
   vars.termStructure.registerWith(flag.update);
   flag.lower();
   index.addFixing(vars.today, 0.0425);
   if (!flag.isUp())
   {
      QAssert.Fail("Observer was not notified of rate fixing");
   }

   // after the fixing, fair rates must still match the quotes
   for (int i = 0; i < vars.swaps; i++)
   {
      Period tenor = new Period(vars.swapData[i].n, vars.swapData[i].units);
      VanillaSwap swap = new MakeVanillaSwap(tenor, index, 0.0)
                         .withEffectiveDate(vars.settlement)
                         .withFixedLegDayCount(vars.fixedLegDayCounter)
                         .withFixedLegTenor(new Period(vars.fixedLegFrequency))
                         .withFixedLegConvention(vars.fixedLegConvention)
                         .withFixedLegTerminationDateConvention(vars.fixedLegConvention)
                         .value();
      double expectedRate = vars.swapData[i].rate / 100,
             estimatedRate = swap.fairRate();
      double tolerance = 1.0e-9;
      if (Math.Abs(expectedRate - estimatedRate) > tolerance)
      {
         QAssert.Fail("after LIBOR fixing:\n" + vars.swapData[i].n + " year(s) swap:\n" +
                      " estimated rate: " + (estimatedRate) + "\n" +
                      " expected rate: " + (expectedRate));
      }
   }
}
public void testDifferentialEvolution()
{
   // Run several differential-evolution configurations against classic
   // De Jong / Griewangk cost functions and compare the minima found
   // with the known values.
   //
   // Note:
   // * "ModFourthDeJong" has no well-defined optimum because of its
   //   noisy part; it just has to end up <= 15 in this example.  The
   //   concrete value might differ for different input and different
   //   random numbers.
   // * "Griewangk" is an example where the adaptive version of
   //   DifferentialEvolution turns out to be more successful.
   DifferentialEvolution.Configuration jitterConfig = new DifferentialEvolution.Configuration()
      .withStepsizeWeight(0.4)
      .withBounds()
      .withCrossoverProbability(0.35)
      .withPopulationMembers(500)
      .withStrategy(DifferentialEvolution.Strategy.BestMemberWithJitter)
      .withCrossoverType(DifferentialEvolution.CrossoverType.Normal)
      .withAdaptiveCrossover()
      .withSeed(3242);

   DifferentialEvolution.Configuration selfAdaptiveConfig = new DifferentialEvolution.Configuration()
      .withStepsizeWeight(1.8)
      .withBounds()
      .withCrossoverProbability(0.9)
      .withPopulationMembers(1000)
      .withStrategy(DifferentialEvolution.Strategy.Rand1SelfadaptiveWithRotation)
      .withCrossoverType(DifferentialEvolution.CrossoverType.Normal)
      .withAdaptiveCrossover()
      .withSeed(3242);
   DifferentialEvolution selfAdaptiveOptimizer = new DifferentialEvolution(selfAdaptiveConfig);

   // one optimiser per cost function; the last one (Griewangk) uses
   // the self-adaptive configuration
   List<DifferentialEvolution> optimizers = new List<DifferentialEvolution>();
   for (int n = 0; n < 4; n++)
   {
      optimizers.Add(new DifferentialEvolution(jitterConfig));
   }
   optimizers.Add(selfAdaptiveOptimizer);

   List<CostFunction> costFunctions = new List<CostFunction>()
   {
      new FirstDeJong(),
      new SecondDeJong(),
      new ModThirdDeJong(),
      new ModFourthDeJong(),
      new Griewangk()
   };

   List<BoundaryConstraint> constraints = new List<BoundaryConstraint>()
   {
      new BoundaryConstraint(-10.0, 10.0),
      new BoundaryConstraint(-10.0, 10.0),
      new BoundaryConstraint(-10.0, 10.0),
      new BoundaryConstraint(-10.0, 10.0),
      new BoundaryConstraint(-600.0, 600.0)
   };

   List<Vector> initialValues = new List<Vector>()
   {
      new Vector(3, 5.0),
      new Vector(2, 5.0),
      new Vector(5, 5.0),
      new Vector(30, 5.0),
      new Vector(10, 100.0)
   };

   List<EndCriteria> endCriteria = new List<EndCriteria>()
   {
      new EndCriteria(100, 10, 1e-10, 1e-8, null),
      new EndCriteria(100, 10, 1e-10, 1e-8, null),
      new EndCriteria(100, 10, 1e-10, 1e-8, null),
      new EndCriteria(500, 100, 1e-10, 1e-8, null),
      new EndCriteria(1000, 800, 1e-12, 1e-10, null)
   };

   List<double> minima = new List<double>() { 0.0, 0.0, 0.0, 10.9639796558, 0.0 };

   for (int i = 0; i < costFunctions.Count; ++i)
   {
      Problem problem = new Problem(costFunctions[i], constraints[i], initialValues[i]);
      optimizers[i].minimize(problem, endCriteria[i]);

      if (i != 3)
      {
         // stable cost functions: expect the known minimum
         if (Math.Abs(problem.functionValue() - minima[i]) > 1e-8)
         {
            QAssert.Fail("costFunction # " + i +
                         "\ncalculated: " + problem.functionValue() +
                         "\nexpected: " + minima[i]);
         }
      }
      else
      {
         // this case is unstable due to randomness; we're good as long
         // as the result is below 15
         if (problem.functionValue() > 15)
         {
            QAssert.Fail("costFunction # " + i +
                         "\ncalculated: " + problem.functionValue() +
                         "\nexpected: " + "less than 15");
         }
      }
   }
}
public void testActualActualWithSchedule()
{
   // Testing the Act/Act (ISMA) day counter when a reference schedule
   // is supplied, on a bond with a long first coupon.
   //
   // Fixes vs. previous version:
   // * the final check's failure message now prints the numeric sum
   //   (t_no_reference + t2); it previously concatenated the two
   //   doubles as strings because of operator precedence;
   // * the first-quasi-period check now reports the value actually
   //   under test (t_with_reference) instead of t_no_reference.

   // long first coupon
   Date issueDateExpected = new Date(17, Month.January, 2017);
   Date firstCouponDateExpected = new Date(31, Month.August, 2017);

   Schedule schedule = new MakeSchedule()
                       .from(issueDateExpected)
                       .withFirstDate(firstCouponDateExpected)
                       .to(new Date(28, Month.February, 2026))
                       .withFrequency(Frequency.Semiannual)
                       .withCalendar(new Canada())
                       .withConvention(BusinessDayConvention.Unadjusted)
                       .backwards()
                       .endOfMonth().value();

   Date issueDate = schedule.date(0);
   Utils.QL_REQUIRE(issueDate == issueDateExpected, () =>
                    "This is not the expected issue date " + issueDate +
                    " expected " + issueDateExpected);
   Date firstCouponDate = schedule.date(1);
   Utils.QL_REQUIRE(firstCouponDate == firstCouponDateExpected, () =>
                    "This is not the expected first coupon date " + firstCouponDate +
                    " expected: " + firstCouponDateExpected);

   // Make the quasi coupon dates:
   Date quasiCouponDate2 = schedule.calendar().advance(firstCouponDate,
                                                       -schedule.tenor(),
                                                       schedule.businessDayConvention(),
                                                       schedule.endOfMonth());
   Date quasiCouponDate1 = schedule.calendar().advance(quasiCouponDate2,
                                                       -schedule.tenor(),
                                                       schedule.businessDayConvention(),
                                                       schedule.endOfMonth());

   Date quasiCouponDate1Expected = new Date(31, Month.August, 2016);
   Date quasiCouponDate2Expected = new Date(28, Month.February, 2017);
   Utils.QL_REQUIRE(quasiCouponDate2 == quasiCouponDate2Expected, () =>
                    "Expected " + quasiCouponDate2Expected +
                    " as the later quasi coupon date but received " + quasiCouponDate2);
   Utils.QL_REQUIRE(quasiCouponDate1 == quasiCouponDate1Expected, () =>
                    "Expected " + quasiCouponDate1Expected +
                    " as the earlier quasi coupon date but received " + quasiCouponDate1);

   DayCounter dayCounter = new ActualActual(ActualActual.Convention.ISMA, schedule);

   // full coupon: reference dates should make no difference
   double t_with_reference = dayCounter.yearFraction(
                                issueDate, firstCouponDate, quasiCouponDate2, firstCouponDate);
   double t_no_reference = dayCounter.yearFraction(issueDate, firstCouponDate);
   // fraction of the first quasi period plus one full half-year period
   double t_total = ISMAYearFractionWithReferenceDates(dayCounter,
                                                       issueDate, quasiCouponDate2,
                                                       quasiCouponDate1, quasiCouponDate2) + 0.5;
   double expected = 0.6160220994;
   if (Math.Abs(t_total - expected) > 1.0e-10)
   {
      QAssert.Fail("Failed to reproduce expected time:\n" +
                   " calculated: " + t_total + "\n" +
                   " expected: " + expected);
   }
   if (Math.Abs(t_with_reference - expected) > 1.0e-10)
   {
      QAssert.Fail("Failed to reproduce expected time:\n" +
                   " calculated: " + t_with_reference + "\n" +
                   " expected: " + expected);
   }
   if (Math.Abs(t_no_reference - t_with_reference) > 1.0e-10)
   {
      QAssert.Fail("Should produce the same time whether or not references are present");
   }

   // settlement date in the first quasi-period
   Date settlementDate = new Date(29, Month.January, 2017);
   t_with_reference = ISMAYearFractionWithReferenceDates(
                         dayCounter, issueDate, settlementDate, quasiCouponDate1, quasiCouponDate2);
   t_no_reference = dayCounter.yearFraction(issueDate, settlementDate);
   double t_expected_first_qp = 0.03314917127071823; //12.0/362
   if (Math.Abs(t_with_reference - t_expected_first_qp) > 1.0e-10)
   {
      // report the value being tested (t_with_reference)
      QAssert.Fail("Failed to reproduce expected time:\n" +
                   " calculated: " + t_with_reference + "\n" +
                   " expected: " + t_expected_first_qp);
   }
   if (Math.Abs(t_no_reference - t_with_reference) > 1.0e-10)
   {
      QAssert.Fail("Should produce the same time whether or not references are present");
   }
   double t2 = dayCounter.yearFraction(settlementDate, firstCouponDate);
   if (Math.Abs(t_expected_first_qp + t2 - expected) > 1.0e-10)
   {
      QAssert.Fail("Sum of quasiperiod2 split is not consistent");
   }

   // settlement date in the second quasi-period
   settlementDate = new Date(29, Month.July, 2017);
   t_no_reference = dayCounter.yearFraction(issueDate, settlementDate);
   t_with_reference = ISMAYearFractionWithReferenceDates(
                         dayCounter, issueDate, quasiCouponDate2, quasiCouponDate1, quasiCouponDate2)
                      + ISMAYearFractionWithReferenceDates(
                         dayCounter, quasiCouponDate2, settlementDate, quasiCouponDate2, firstCouponDate);
   if (Math.Abs(t_no_reference - t_with_reference) > 1.0e-10)
   {
      QAssert.Fail("These two cases should be identical");
   }
   t2 = dayCounter.yearFraction(settlementDate, firstCouponDate);
   if (Math.Abs(t_total - (t_no_reference + t2)) > 1.0e-10)
   {
      // parenthesized so the message shows the numeric sum, not the two
      // values concatenated as strings
      QAssert.Fail("Failed to reproduce expected time:\n" +
                   " calculated: " + t_total + "\n" +
                   " expected: " + (t_no_reference + t2));
   }
}
public List <CashFlow> makeYoYCapFlooredLeg(int which, Date startDate, int length, List <double?> caps, List <double?> floors, double volatility, double gearing = 1.0, double spread = 0.0) { Handle <YoYOptionletVolatilitySurface> vol = new Handle <YoYOptionletVolatilitySurface>( new ConstantYoYOptionletVolatility(volatility, settlementDays, calendar, convention, dc, observationLag, frequency, iir.interpolated())); YoYInflationCouponPricer pricer = null; switch (which) { case 0: pricer = new BlackYoYInflationCouponPricer(vol); break; case 1: pricer = new UnitDisplacedBlackYoYInflationCouponPricer(vol); break; case 2: pricer = new BachelierYoYInflationCouponPricer(vol); break; default: QAssert.Fail("unknown coupon pricer request: which = " + which + "should be 0=Black,1=DD,2=Bachelier"); break; } List <double> gearingVector = new InitializedList <double>(length, gearing); List <double> spreadVector = new InitializedList <double>(length, spread); YoYInflationIndex ii = iir as YoYInflationIndex; Date endDate = calendar.advance(startDate, new Period(length, TimeUnit.Years), BusinessDayConvention.Unadjusted); Schedule schedule = new Schedule(startDate, endDate, new Period(frequency), calendar, BusinessDayConvention.Unadjusted, BusinessDayConvention.Unadjusted,// ref periods & acc periods DateGeneration.Rule.Forward, false); List <CashFlow> yoyLeg = new yoyInflationLeg(schedule, calendar, ii, observationLag) .withPaymentDayCounter(dc) .withGearings(gearingVector) .withSpreads(spreadVector) .withCaps(caps) .withFloors(floors) .withNotionals(nominals) .withPaymentAdjustment(convention); for (int i = 0; i < yoyLeg.Count; i++) { ((YoYInflationCoupon)(yoyLeg[i])).setPricer(pricer); } //setCouponPricer(iborLeg, pricer); return(yoyLeg); }
public void testBusiness252() { // Testing business/252 day counter List <Date> testDates = new List <Date>(); testDates.Add(new Date(1, Month.February, 2002)); testDates.Add(new Date(4, Month.February, 2002)); testDates.Add(new Date(16, Month.May, 2003)); testDates.Add(new Date(17, Month.December, 2003)); testDates.Add(new Date(17, Month.December, 2004)); testDates.Add(new Date(19, Month.December, 2005)); testDates.Add(new Date(2, Month.January, 2006)); testDates.Add(new Date(13, Month.March, 2006)); testDates.Add(new Date(15, Month.May, 2006)); testDates.Add(new Date(17, Month.March, 2006)); testDates.Add(new Date(15, Month.May, 2006)); testDates.Add(new Date(26, Month.July, 2006)); testDates.Add(new Date(28, Month.June, 2007)); testDates.Add(new Date(16, Month.September, 2009)); testDates.Add(new Date(26, Month.July, 2016)); double[] expected = { 0.0039682539683, 1.2738095238095, 0.6031746031746, 0.9960317460317, 1.0000000000000, 0.0396825396825, 0.1904761904762, 0.1666666666667, -0.1507936507937, 0.1507936507937, 0.2023809523810, 0.912698412698, 2.214285714286, 6.84126984127 }; DayCounter dayCounter1 = new Business252(new Brazil()); double calculated; for (int i = 1; i < testDates.Count; i++) { calculated = dayCounter1.yearFraction(testDates[i - 1], testDates[i]); if (Math.Abs(calculated - expected[i - 1]) > 1.0e-12) { QAssert.Fail("from " + testDates[i - 1] + " to " + testDates[i] + ":\n" + " calculated: " + calculated + "\n" + " expected: " + expected[i - 1]); } } DayCounter dayCounter2 = new Business252(); for (int i = 1; i < testDates.Count; i++) { calculated = dayCounter2.yearFraction(testDates[i - 1], testDates[i]); if (Math.Abs(calculated - expected[i - 1]) > 1.0e-12) { QAssert.Fail("from " + testDates[i - 1] + " to " + testDates[i] + ":\n" + " calculated: " + calculated + "\n" + " expected: " + expected[i - 1]); } } }
public void testDecomposition()
{
   // Testing collared coupon against its decomposition...
   //
   // Prices YoY inflation capped/floored/collared legs and checks each
   // against its static decomposition into a vanilla floating leg plus
   // cap/floor/collar instruments, for unit gearing/zero spread and for
   // positive and negative gearing with a non-zero spread.
   CommonVars vars = new CommonVars();

   double tolerance = 1e-10;
   double npvVanilla, npvCappedLeg, npvFlooredLeg, npvCollaredLeg, npvCap, npvFloor, npvCollar;
   double error;
   double floorstrike = 0.05;
   double capstrike = 0.10;
   // per-coupon strike vectors; the "...0" lists mean "no cap"/"no floor"
   List<double?> caps = new InitializedList<double?>(vars.length, capstrike);
   List<double?> caps0 = new List<double?>();
   List<double?> floors = new InitializedList<double?>(vars.length, floorstrike);
   List<double?> floors0 = new List<double?>();
   double gearing_p = 0.5;
   double spread_p = 0.002;
   double gearing_n = -1.5;
   double spread_n = 0.12;
   // fixed leg with zero rate
   List<CashFlow> fixedLeg = vars.makeFixedLeg(vars.startDate, vars.length);
   // floating leg with gearing=1 and spread=0
   List<CashFlow> floatLeg = vars.makeYoYLeg(vars.startDate, vars.length);
   // floating leg with positive gearing (gearing_p) and spread<>0
   List<CashFlow> floatLeg_p = vars.makeYoYLeg(vars.startDate, vars.length, gearing_p, spread_p);
   // floating leg with negative gearing (gearing_n) and spread<>0
   List<CashFlow> floatLeg_n = vars.makeYoYLeg(vars.startDate, vars.length, gearing_n, spread_n);
   // Swap with null fixed leg and floating leg with gearing=1 and spread=0
   Swap vanillaLeg = new Swap(fixedLeg, floatLeg);
   // Swap with null fixed leg and floating leg with positive gearing and spread<>0
   Swap vanillaLeg_p = new Swap(fixedLeg, floatLeg_p);
   // Swap with null fixed leg and floating leg with negative gearing and spread<>0
   Swap vanillaLeg_n = new Swap(fixedLeg, floatLeg_n);

   IPricingEngine engine = new DiscountingSwapEngine(vars.nominalTS);
   vanillaLeg.setPricingEngine(engine);    // here use the autoset feature
   vanillaLeg_p.setPricingEngine(engine);
   vanillaLeg_n.setPricingEngine(engine);

   // CAPPED coupon - Decomposition of payoff
   // Payoff = Nom * Min(rate,strike) * accrualperiod =
   //        = Nom * [rate + Min(0,strike-rate)] * accrualperiod =
   //        = Nom * rate * accrualperiod - Nom * Max(rate-strike,0) * accrualperiod =
   //        = VanillaFloatingLeg - Call
   //
   int whichPricer = 0;

   // Case gearing = 1 and spread = 0
   List<CashFlow> cappedLeg = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                        caps, floors0, vars.volatility);
   Swap capLeg = new Swap(fixedLeg, cappedLeg);
   capLeg.setPricingEngine(engine);
   YoYInflationCap cap = new YoYInflationCap(floatLeg, new List<double>() { capstrike });
   cap.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg.NPV();
   npvCappedLeg = capLeg.NPV();
   npvCap = cap.NPV();
   error = Math.Abs(npvCappedLeg - (npvVanilla - npvCap));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Capped Leg: gearing=1, spread=0%, strike=" + capstrike * 100 +
                   "%\n" +
                   " Capped Floating Leg NPV: " + npvCappedLeg + "\n" +
                   " Floating Leg NPV - Cap NPV: " + (npvVanilla - npvCap) + "\n" +
                   " Diff: " + error);
   }

   // gearing = 1 and spread = 0
   // FLOORED coupon - Decomposition of payoff
   // Payoff = Nom * Max(rate,strike) * accrualperiod =
   //        = Nom * [rate + Max(0,strike-rate)] * accrualperiod =
   //        = Nom * rate * accrualperiod + Nom * Max(strike-rate,0) * accrualperiod =
   //        = VanillaFloatingLeg + Put
   //
   List<CashFlow> flooredLeg = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                         caps0, floors, vars.volatility);
   Swap floorLeg = new Swap(fixedLeg, flooredLeg);
   floorLeg.setPricingEngine(engine);
   YoYInflationFloor floor = new YoYInflationFloor(floatLeg, new List<double>() { floorstrike });
   floor.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvFlooredLeg = floorLeg.NPV();
   npvFloor = floor.NPV();
   error = Math.Abs(npvFlooredLeg - (npvVanilla + npvFloor));
   if (error > tolerance)
   {
      QAssert.Fail("YoY Floored Leg: gearing=1, spread=0%, strike=" + floorstrike * 100 +
                   "%\n" +
                   " Floored Floating Leg NPV: " + npvFlooredLeg + "\n" +
                   " Floating Leg NPV + Floor NPV: " + (npvVanilla + npvFloor) + "\n" +
                   " Diff: " + error);
   }

   // gearing = 1 and spread = 0
   // COLLARED coupon - Decomposition of payoff
   // Payoff = Nom * Min(capstrike, Max(rate, floorstrike)) * accrualperiod =
   //        = VanillaFloatingLeg - Collar
   //
   List<CashFlow> collaredLeg = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                          caps, floors, vars.volatility);
   Swap collarLeg = new Swap(fixedLeg, collaredLeg);
   collarLeg.setPricingEngine(engine);
   YoYInflationCollar collar = new YoYInflationCollar(floatLeg,
                                                      new List<double>() { capstrike },
                                                      new List<double>() { floorstrike });
   collar.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvCollaredLeg = collarLeg.NPV();
   npvCollar = collar.NPV();
   error = Math.Abs(npvCollaredLeg - (npvVanilla - npvCollar));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Collared Leg: gearing=1, spread=0%, strike=" +
                   floorstrike * 100 + "% and " + capstrike * 100 +
                   "%\n" +
                   " Collared Floating Leg NPV: " + npvCollaredLeg + "\n" +
                   " Floating Leg NPV - Collar NPV: " + (npvVanilla - npvCollar) + "\n" +
                   " Diff: " + error);
   }

   // gearing = a and spread = b
   // CAPPED coupon - Decomposition of payoff
   // Payoff
   // = Nom * Min(a*rate+b,strike) * accrualperiod =
   // = Nom * [a*rate+b + Min(0,strike-a*rate-b)] * accrualperiod =
   // = Nom * a*rate+b * accrualperiod + Nom * Min(strike-b-a*rate,0) * accrualperiod
   // --> If a>0 (assuming positive effective strike):
   //     Payoff = VanillaFloatingLeg - Call(a*rate+b,strike)
   // --> If a<0 (assuming positive effective strike):
   //     Payoff = VanillaFloatingLeg + Nom * Min(strike-b+|a|*rate+,0) * accrualperiod =
   //            = VanillaFloatingLeg + Put(|a|*rate+b,strike)
   //
   // Positive gearing
   List<CashFlow> cappedLeg_p = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                          caps, floors0, vars.volatility,
                                                          gearing_p, spread_p);
   Swap capLeg_p = new Swap(fixedLeg, cappedLeg_p);
   capLeg_p.setPricingEngine(engine);
   YoYInflationCap cap_p = new YoYInflationCap(floatLeg_p, new List<double>() { capstrike });
   cap_p.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg_p.NPV();
   npvCappedLeg = capLeg_p.NPV();
   npvCap = cap_p.NPV();
   error = Math.Abs(npvCappedLeg - (npvVanilla - npvCap));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Capped Leg: gearing=" + gearing_p + ", " +
                   "spread= " + spread_p * 100 +
                   "%, strike=" + capstrike * 100 + "%, " +
                   "effective strike= " + (capstrike - spread_p) / gearing_p * 100 +
                   "%\n" +
                   " Capped Floating Leg NPV: " + npvCappedLeg + "\n" +
                   " Vanilla Leg NPV: " + npvVanilla + "\n" +
                   " Cap NPV: " + npvCap + "\n" +
                   " Floating Leg NPV - Cap NPV: " + (npvVanilla - npvCap) + "\n" +
                   " Diff: " + error);
   }

   // Negative gearing
   List<CashFlow> cappedLeg_n = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                          caps, floors0, vars.volatility,
                                                          gearing_n, spread_n);
   Swap capLeg_n = new Swap(fixedLeg, cappedLeg_n);
   capLeg_n.setPricingEngine(engine);
   // for a<0 the capped leg decomposes via a floor at the effective strike
   YoYInflationFloor floor_n = new YoYInflationFloor(floatLeg,
                                                     new List<double>() { (capstrike - spread_n) / gearing_n });
   floor_n.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg_n.NPV();
   npvCappedLeg = capLeg_n.NPV();
   npvFloor = floor_n.NPV();
   error = Math.Abs(npvCappedLeg - (npvVanilla + gearing_n * npvFloor));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Capped Leg: gearing=" + gearing_n + ", " +
                   "spread= " + spread_n * 100 +
                   "%, strike=" + capstrike * 100 + "%, " +
                   "effective strike= " + ((capstrike - spread_n) / gearing_n * 100) +
                   "%\n" +
                   " Capped Floating Leg NPV: " + npvCappedLeg + "\n" +
                   " npv Vanilla: " + npvVanilla + "\n" +
                   " npvFloor: " + npvFloor + "\n" +
                   " Floating Leg NPV - Cap NPV: " + (npvVanilla + gearing_n * npvFloor) + "\n" +
                   " Diff: " + error);
   }

   // gearing = a and spread = b
   // FLOORED coupon - Decomposition of payoff
   // Payoff
   // = Nom * Max(a*rate+b,strike) * accrualperiod =
   // = Nom * [a*rate+b + Max(0,strike-a*rate-b)] * accrualperiod =
   // = Nom * a*rate+b * accrualperiod + Nom * Max(strike-b-a*rate,0) * accrualperiod
   // --> If a>0 (assuming positive effective strike):
   //     Payoff = VanillaFloatingLeg + Put(a*rate+b,strike)
   // --> If a<0 (assuming positive effective strike):
   //     Payoff = VanillaFloatingLeg + Nom * Max(strike-b+|a|*rate+,0) * accrualperiod =
   //            = VanillaFloatingLeg - Call(|a|*rate+b,strike)
   //
   // Positive gearing
   List<CashFlow> flooredLeg_p1 = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                            caps0, floors, vars.volatility,
                                                            gearing_p, spread_p);
   Swap floorLeg_p1 = new Swap(fixedLeg, flooredLeg_p1);
   floorLeg_p1.setPricingEngine(engine);
   YoYInflationFloor floor_p1 = new YoYInflationFloor(floatLeg_p, new List<double>() { floorstrike });
   floor_p1.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg_p.NPV();
   npvFlooredLeg = floorLeg_p1.NPV();
   npvFloor = floor_p1.NPV();
   error = Math.Abs(npvFlooredLeg - (npvVanilla + npvFloor));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Floored Leg: gearing=" + gearing_p + ", " +
                   "spread= " + spread_p * 100 +
                   "%, strike=" + floorstrike * 100 + "%, " +
                   "effective strike= " + (floorstrike - spread_p) / gearing_p * 100 +
                   "%\n" +
                   " Floored Floating Leg NPV: " + npvFlooredLeg + "\n" +
                   " Floating Leg NPV + Floor NPV: " + (npvVanilla + npvFloor) + "\n" +
                   " Diff: " + error);
   }

   // Negative gearing
   List<CashFlow> flooredLeg_n = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                           caps0, floors, vars.volatility,
                                                           gearing_n, spread_n);
   Swap floorLeg_n = new Swap(fixedLeg, flooredLeg_n);
   floorLeg_n.setPricingEngine(engine);
   // for a<0 the floored leg decomposes via a cap at the effective strike
   YoYInflationCap cap_n = new YoYInflationCap(floatLeg,
                                               new List<double>() { (floorstrike - spread_n) / gearing_n });
   cap_n.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg_n.NPV();
   npvFlooredLeg = floorLeg_n.NPV();
   npvCap = cap_n.NPV();
   error = Math.Abs(npvFlooredLeg - (npvVanilla - gearing_n * npvCap));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Capped Leg: gearing=" + gearing_n + ", " +
                   "spread= " + spread_n * 100 +
                   "%, strike=" + floorstrike * 100 + "%, " +
                   "effective strike= " + (floorstrike - spread_n) / gearing_n * 100 +
                   "%\n" +
                   " Capped Floating Leg NPV: " + npvFlooredLeg + "\n" +
                   " Floating Leg NPV - Cap NPV: " + (npvVanilla - gearing_n * npvCap) + "\n" +
                   " Diff: " + error);
   }

   // gearing = a and spread = b
   // COLLARED coupon - Decomposition of payoff
   // Payoff = Nom * Min(caprate,Max(a*rate+b,floorrate)) * accrualperiod
   // --> If a>0 (assuming positive effective strike):
   //     Payoff = VanillaFloatingLeg - Collar(a*rate+b, floorrate, caprate)
   // --> If a<0 (assuming positive effective strike):
   //     Payoff = VanillaFloatingLeg + Collar(|a|*rate+b, caprate, floorrate)
   //
   // Positive gearing
   List<CashFlow> collaredLeg_p = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                            caps, floors, vars.volatility,
                                                            gearing_p, spread_p);
   Swap collarLeg_p1 = new Swap(fixedLeg, collaredLeg_p);
   collarLeg_p1.setPricingEngine(engine);
   YoYInflationCollar collar_p = new YoYInflationCollar(floatLeg_p,
                                                        new List<double>() { capstrike },
                                                        new List<double>() { floorstrike });
   collar_p.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg_p.NPV();
   npvCollaredLeg = collarLeg_p1.NPV();
   npvCollar = collar_p.NPV();
   error = Math.Abs(npvCollaredLeg - (npvVanilla - npvCollar));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Collared Leg: gearing=" + gearing_p + ", " +
                   "spread= " + spread_p * 100 +
                   "%, strike=" + floorstrike * 100 + "% and " + capstrike * 100 + "%, " +
                   "effective strike=" + (floorstrike - spread_p) / gearing_p * 100 +
                   "% and " + (capstrike - spread_p) / gearing_p * 100 +
                   "%\n" +
                   " Collared Floating Leg NPV: " + npvCollaredLeg + "\n" +
                   " Floating Leg NPV - Collar NPV: " + (npvVanilla - npvCollar) + "\n" +
                   " Diff: " + error);
   }

   // Negative gearing
   List<CashFlow> collaredLeg_n = vars.makeYoYCapFlooredLeg(whichPricer, vars.startDate, vars.length,
                                                            caps, floors, vars.volatility,
                                                            gearing_n, spread_n);
   Swap collarLeg_n1 = new Swap(fixedLeg, collaredLeg_n);
   collarLeg_n1.setPricingEngine(engine);
   YoYInflationCollar collar_n = new YoYInflationCollar(floatLeg,
                                                        new List<double>() { (floorstrike - spread_n) / gearing_n },
                                                        new List<double>() { (capstrike - spread_n) / gearing_n });
   collar_n.setPricingEngine(vars.makeEngine(vars.volatility, whichPricer));
   npvVanilla = vanillaLeg_n.NPV();
   npvCollaredLeg = collarLeg_n1.NPV();
   npvCollar = collar_n.NPV();
   error = Math.Abs(npvCollaredLeg - (npvVanilla - gearing_n * npvCollar));
   if (error > tolerance)
   {
      QAssert.Fail("\nYoY Collared Leg: gearing=" + gearing_n + ", " +
                   "spread= " + spread_n * 100 +
                   "%, strike=" + floorstrike * 100 + "% and " + capstrike * 100 + "%, " +
                   "effective strike=" + (floorstrike - spread_n) / gearing_n * 100 +
                   "% and " + (capstrike - spread_n) / gearing_n * 100 +
                   "%\n" +
                   " Collared Floating Leg NPV: " + npvCollaredLeg + "\n" +
                   " Floating Leg NPV - Collar NPV: " + (npvVanilla - gearing_n * npvCollar) + "\n" +
                   " Diff: " + error);
   }

   // remove circular reference
   vars.hy.linkTo(null);
}
public void testThirty360_EurobondBasis()
{
   // Testing the thirty/360 day counter (Eurobond Basis).
   // Source: ISDA 2006 Definitions 4.16 (g) - 30E/360 (or Eurobond
   // Basis), based on ICMA (Rule 251) and FBF; this is the version of
   // 30E/360 used by Excel.
   DayCounter dayCounter = new Thirty360(Thirty360.Thirty360Convention.EurobondBasis);

   // each row is one test period: { start date, end date }
   Date[,] periods =
   {
      // ISDA - Example 1: End dates do not involve the last day of February
      { new Date(20, Month.August, 2006),    new Date(20, Month.February, 2007) },
      { new Date(20, Month.February, 2007),  new Date(20, Month.August, 2007) },
      { new Date(20, Month.August, 2007),    new Date(20, Month.February, 2008) },
      { new Date(20, Month.February, 2008),  new Date(20, Month.August, 2008) },
      { new Date(20, Month.August, 2008),    new Date(20, Month.February, 2009) },
      { new Date(20, Month.February, 2009),  new Date(20, Month.August, 2009) },
      // ISDA - Example 2: End dates include some end-February dates
      { new Date(28, Month.February, 2006),  new Date(31, Month.August, 2006) },
      { new Date(31, Month.August, 2006),    new Date(28, Month.February, 2007) },
      { new Date(28, Month.February, 2007),  new Date(31, Month.August, 2007) },
      { new Date(31, Month.August, 2007),    new Date(29, Month.February, 2008) },
      { new Date(29, Month.February, 2008),  new Date(31, Month.August, 2008) },
      { new Date(31, Month.August, 2008),    new Date(28, Month.February, 2009) },
      { new Date(28, Month.February, 2009),  new Date(31, Month.August, 2009) },
      { new Date(31, Month.August, 2009),    new Date(28, Month.February, 2010) },
      { new Date(28, Month.February, 2010),  new Date(31, Month.August, 2010) },
      { new Date(31, Month.August, 2010),    new Date(28, Month.February, 2011) },
      { new Date(28, Month.February, 2011),  new Date(31, Month.August, 2011) },
      { new Date(31, Month.August, 2011),    new Date(29, Month.February, 2012) },
      // ISDA - Example 3: Miscellaneous calculations
      { new Date(31, Month.January, 2006),   new Date(28, Month.February, 2006) },
      { new Date(30, Month.January, 2006),   new Date(28, Month.February, 2006) },
      { new Date(28, Month.February, 2006),  new Date(3, Month.March, 2006) },
      { new Date(14, Month.February, 2006),  new Date(28, Month.February, 2006) },
      { new Date(30, Month.September, 2006), new Date(31, Month.October, 2006) },
      { new Date(31, Month.October, 2006),   new Date(28, Month.November, 2006) },
      { new Date(31, Month.August, 2007),    new Date(28, Month.February, 2008) },
      { new Date(28, Month.February, 2008),  new Date(28, Month.August, 2008) },
      { new Date(28, Month.February, 2008),  new Date(30, Month.August, 2008) },
      { new Date(28, Month.February, 2008),  new Date(31, Month.August, 2008) },
      { new Date(26, Month.February, 2007),  new Date(28, Month.February, 2008) },
      { new Date(26, Month.February, 2007),  new Date(29, Month.February, 2008) },
      { new Date(29, Month.February, 2008),  new Date(28, Month.February, 2009) },
      { new Date(28, Month.February, 2008),  new Date(30, Month.March, 2008) },
      { new Date(28, Month.February, 2008),  new Date(31, Month.March, 2008) }
   };

   // expected 30E/360 day counts, one per period above
   int[] expected =
   {
      180, 180, 180, 180, 180, 180,
      182, 178, 182, 179, 181, 178,
      182, 178, 182, 178, 182, 179,
      28, 28, 5, 14, 30, 28, 178,
      180, 182, 182, 362, 363, 359, 32, 32
   };

   for (int i = 0; i < periods.GetLength(0); i++)
   {
      int calculated = dayCounter.dayCount(periods[i, 0], periods[i, 1]);
      if (calculated != expected[i])
      {
         QAssert.Fail("from " + periods[i, 0] + " to " + periods[i, 1] + ":\n" +
                      " calculated: " + calculated + "\n" +
                      " expected: " + expected[i]);
      }
   }
}
public void testInstrumentEquality()
{
   // Testing inflation capped/floored coupon against inflation capfloor instrument...
   //
   // Model-independent parity check, repeated for each of three pricers and a
   // grid of maturities / strikes / vols:
   //    capped coupon  = fwd - cap,   with fwd = swap(0)
   //    floored coupon = fwd + floor
   CommonVars vars = new CommonVars();

   int[] lengths = { 1, 2, 3, 5, 7, 10, 15, 20 };
   // vol is low ...
   double[] strikes = { 0.01, 0.025, 0.029, 0.03, 0.031, 0.035, 0.07 };
   // yoy inflation vol is generally very low
   double[] vols = { 0.001, 0.005, 0.010, 0.015, 0.020 };

   // this is model independent
   // capped coupon = fwd - cap, and fwd = swap(0)
   // floored coupon = fwd + floor
   for (int whichPricer = 0; whichPricer < 3; whichPricer++)
   {
      for (int i = 0; i < lengths.Length; i++)
      {
         for (int j = 0; j < strikes.Length; j++)
         {
            for (int k = 0; k < vols.Length; k++)
            {
               // Plain yoy leg plus cap and floor instruments struck on it.
               List <CashFlow> leg = vars.makeYoYLeg(vars.evaluationDate, lengths[i]);
               Instrument cap = vars.makeYoYCapFloor(CapFloorType.Cap, leg, strikes[j], vols[k], whichPricer);
               Instrument floor = vars.makeYoYCapFloor(CapFloorType.Floor, leg, strikes[j], vols[k], whichPricer);

               // Reference year-on-year swap paying a zero fixed rate: its NPV
               // is the discounted forward yoy leg, i.e. swap(0) = fwd.
               Date from = vars.nominalTS.link.referenceDate();
               Date to = from + new Period(lengths[i], TimeUnit.Years);
               Schedule yoySchedule = new MakeSchedule().from(from).to(to)
               .withTenor(new Period(1, TimeUnit.Years))
               .withCalendar(new UnitedKingdom())
               .withConvention(BusinessDayConvention.Unadjusted)
               .backwards().value();
               YearOnYearInflationSwap swap = new YearOnYearInflationSwap
                  (YearOnYearInflationSwap.Type.Payer,
                   1000000.0,
                   yoySchedule, //fixed schedule, but same as yoy
                   0.0, //strikes[j],
                   vars.dc,
                   yoySchedule,
                   vars.iir,
                   vars.observationLag,
                   0.0, //spread on index
                   vars.dc,
                   new UnitedKingdom());

               Handle <YieldTermStructure> hTS = new Handle <YieldTermStructure>(vars.nominalTS);
               IPricingEngine sppe = new DiscountingSwapEngine(hTS);
               swap.setPricingEngine(sppe);

               // Capped-only leg at the current strike ...
               List <CashFlow> leg2 = vars.makeYoYCapFlooredLeg(whichPricer, from,
                                                                lengths[i],
                                                                new InitializedList <double?>(lengths[i], strikes[j]), //cap
                                                                new List <double?>(), //floor
                                                                vols[k],
                                                                1.0,   // gearing
                                                                0.0);  // spread

               // ... and floored-only leg at the same strike.
               List <CashFlow> leg3 = vars.makeYoYCapFlooredLeg(whichPricer, from,
                                                                lengths[i],
                                                                new List <double?>(), // cap
                                                                new InitializedList <double?>(lengths[i], strikes[j]), //floor
                                                                vols[k],
                                                                1.0,   // gearing
                                                                0.0);  // spread

               // N.B. nominals are 10e6 -- NOTE(review): the swap notional above
               // is 1,000,000 (1e6); confirm the leg nominal in makeYoYCapFlooredLeg.
               double capped = CashFlows.npv(leg2, vars.nominalTS, false);
               if (Math.Abs(capped - (swap.NPV() - cap.NPV())) > 1.0e-6)
               {
                  QAssert.Fail(
                     "capped coupon != swap(0) - cap:\n"
                     + " length: " + lengths[i] + " years\n"
                     + " volatility: " + vols[k] + "\n"
                     + " strike: " + strikes[j] + "\n"
                     + " cap value: " + cap.NPV() + "\n"
                     + " swap value: " + swap.NPV() + "\n"
                     + " capped coupon " + capped);
               }

               // N.B. nominals are 10e6 -- same NOTE(review) as above.
               double floored = CashFlows.npv(leg3, vars.nominalTS, false);
               if (Math.Abs(floored - (swap.NPV() + floor.NPV())) > 1.0e-6)
               {
                  QAssert.Fail(
                     "floored coupon != swap(0) + floor :\n"
                     + " length: " + lengths[i] + " years\n"
                     + " volatility: " + vols[k] + "\n"
                     + " strike: " + strikes[j] + "\n"
                     + " floor value: " + floor.NPV() + "\n"
                     + " swap value: " + swap.NPV() + "\n"
                     + " floored coupon " + floored);
               }
            }
         }
      }
   }
   // remove circular reference
   vars.hy.linkTo(null);
}
public void testChambersImpliedVol()
{
   // Testing Chambers-Nawalkha implied vol approximation
   //
   // Sweeps a grid of option type / displacement / forward / strike / stddev /
   // discount, prices each option with the displaced Black formula, recovers
   // the stddev via the Chambers-Nawalkha approximation, and checks the
   // moneyness-weighted relative error against the tolerance.
   Option.Type[] types = { Option.Type.Call, Option.Type.Put };
   double[] displacements = { 0.0000, 0.0010, 0.0050, 0.0100, 0.0200 };
   double[] forwards = { -0.0010, 0.0000, 0.0050, 0.0100, 0.0200, 0.0500 };
   double[] strikes = { -0.0100, -0.0050, -0.0010, 0.0000, 0.0010, 0.0050, 0.0100, 0.0200, 0.0500, 0.1000 };
   double[] stdDevs = { 0.10, 0.15, 0.20, 0.30, 0.50, 0.60, 0.70, 0.80, 1.00, 1.50, 2.00 };
   double[] discounts = { 1.00, 0.95, 0.80, 1.10 };
   double tol = 5.0E-4;
   for (int i1 = 0; i1 < types.Length; ++i1)
   {
      for (int i2 = 0; i2 < displacements.Length; ++i2)
      {
         for (int i3 = 0; i3 < forwards.Length; ++i3)
         {
            for (int i4 = 0; i4 < strikes.Length; ++i4)
            {
               for (int i5 = 0; i5 < stdDevs.Length; ++i5)
               {
                  for (int i6 = 0; i6 < discounts.Length; ++i6)
                  {
                     // Displaced-diffusion Black is only defined when both the
                     // shifted forward and the shifted strike are positive.
                     if (forwards[i3] + displacements[i2] > 0.0 &&
                         strikes[i4] + displacements[i2] > 0.0)
                     {
                        double premium = Utils.blackFormula(
                           types[i1], strikes[i4], forwards[i3],
                           stdDevs[i5], discounts[i6], displacements[i2]);
                        double atmPremium = Utils.blackFormula(
                           types[i1], forwards[i3], forwards[i3],
                           stdDevs[i5], discounts[i6], displacements[i2]);
                        double iStdDev = Utils.blackFormulaImpliedStdDevChambers(
                           types[i1], strikes[i4], forwards[i3], premium,
                           atmPremium, discounts[i6], displacements[i2]);
                        // Weight the relative error by the (capped at 1) moneyness
                        // so deep in/out-of-the-money points are judged leniently.
                        double moneyness = (strikes[i4] + displacements[i2]) /
                                           (forwards[i3] + displacements[i2]);
                        if (moneyness > 1.0)
                        {
                           moneyness = 1.0 / moneyness;
                        }
                        double error = (iStdDev - stdDevs[i5]) / stdDevs[i5] * moneyness;
                        // NOTE(review): this is a *signed* check -- an arbitrarily
                        // large under-estimate (negative error) can never fail.
                        // Confirm against upstream whether Math.Abs(error) was intended.
                        if (error > tol)
                        {
                           QAssert.Fail("Failed to verify Chambers-Nawalkha approximation for "
                                        + types[i1]
                                        + " displacement=" + displacements[i2]
                                        + " forward=" + forwards[i3]
                                        + " strike=" + strikes[i4]
                                        + " discount=" + discounts[i6]
                                        + " stddev=" + stdDevs[i5]
                                        + " result=" + iStdDev
                                        + " exceeds maximum error tolerance");
                        }
                     }
                  }
               }
            }
         }
      }
   }
}
public void testSobol()
{
   //("Testing Sobol sequences up to dimension " // + PPMT_MAX_DIM + "...");
   //
   // Three checks: (1) the generator honours the requested (maximum)
   // dimensionality, (2) per-dimension sample means equal 0.5 at the end of
   // each base-2 cycle, (3) the first dimension reproduces the base-2
   // van der Corput sequence.
   List <double> point;
   double tolerance = 1.0e-15;

   // (1) testing max dimensionality
   int dimensionality = (int)SobolRsg.PPMT_MAX_DIM;
   ulong seed = 123456;
   SobolRsg rsg = new SobolRsg(dimensionality, seed);
   int points = 100, i;
   for (i = 0; i < points; i++)
   {
      point = rsg.nextSequence().value;
      if (point.Count != dimensionality)
      {
         QAssert.Fail("Sobol sequence generator returns " +
                      " a sequence of wrong dimensionality: " + point.Count
                      + " instead of " + dimensionality);
      }
   }

   // (2) testing homogeneity properties
   dimensionality = 33;
   seed = 123456;
   rsg = new SobolRsg(dimensionality, seed);
   SequenceStatistics stat = new SequenceStatistics(dimensionality);
   List <double> mean;
   int k = 0;
   for (int j = 1; j < 5; j++)
   {
      // cumulative number of draws at the end of the j-th base-2 cycle
      points = (int)(Utils.Pow(2.0, j) - 1); // base 2
      for (; k < points; k++)
      {
         point = rsg.nextSequence().value;
         stat.add(point);
      }
      mean = stat.mean();
      for (i = 0; i < dimensionality; i++)
      {
         double error = Math.Abs(mean[i] - 0.5);
         if (error > tolerance)
         {
            // BUG FIX: (j + 1) must be parenthesized -- with a string on the
            // left, "+ j + 1" concatenated the two numbers separately
            // (printing e.g. "21" instead of "3").
            QAssert.Fail((i + 1) + " dimension: "
                         //+ QL_FIXED
                         + "mean (" + mean[i] + ") at the end of the " + (j + 1)
                         + " cycle in Sobol sequence is not " + 0.5
                         //+ QL_SCIENTIFIC
                         + " (error = " + error + ")");
         }
      }
   }

   // (3) testing first dimension (van der Corput sequence)
   double[] vanderCorputSequenceModuloTwo =
   {
      // first cycle (zero excluded)
      0.50000,
      // second cycle
      0.75000, 0.25000,
      // third cycle
      0.37500, 0.87500, 0.62500, 0.12500,
      // fourth cycle
      0.18750, 0.68750, 0.93750, 0.43750, 0.31250, 0.81250, 0.56250, 0.06250,
      // fifth cycle
      0.09375, 0.59375, 0.84375, 0.34375, 0.46875, 0.96875, 0.71875, 0.21875,
      0.15625, 0.65625, 0.90625, 0.40625, 0.28125, 0.78125, 0.53125, 0.03125
   };
   dimensionality = 1;
   rsg = new SobolRsg(dimensionality);
   points = (int)(Utils.Pow(2.0, 5)) - 1; // five cycles
   for (i = 0; i < points; i++)
   {
      point = rsg.nextSequence().value;
      double error = Math.Abs(point[0] - vanderCorputSequenceModuloTwo[i]);
      if (error > tolerance)
      {
         QAssert.Fail((i + 1) + " draw ("
                      //+ QL_FIXED
                      + point[0] + ") in 1-D Sobol sequence is not in the "
                      + "van der Corput sequence modulo two: "
                      + "it should have been " + vanderCorputSequenceModuloTwo[i]
                      //+ QL_SCIENTIFIC
                      + " (error = " + error + ")");
      }
   }
}
public void testImpliedHazardRate()
{
   // Testing implied hazard-rate for credit-default swaps...
   //
   // Prices CDS of increasing maturity on a two-step hazard-rate curve
   // (h1 up to 5y, h2 from 10y, backward-flat in between), implies a flat
   // hazard rate from each NPV, and checks that the implied rate (a) lies
   // between h1 and h2, (b) increases with maturity, and (c) reproduces the
   // original NPV when fed back through a flat-hazard-rate curve.
   using (SavedSettings backup = new SavedSettings())
   {
      // Initialize curves
      Calendar calendar = new TARGET();
      Date today = calendar.adjust(Date.Today);
      Settings.setEvaluationDate(today);

      double h1 = 0.30, h2 = 0.40;
      DayCounter dayCounter = new Actual365Fixed();
      List <Date> dates = new List <Date>(3);
      List <double> hazardRates = new List <double>(3);
      dates.Add(today);
      hazardRates.Add(h1);
      dates.Add(today + new Period(5, TimeUnit.Years));
      hazardRates.Add(h1);
      dates.Add(today + new Period(10, TimeUnit.Years));
      hazardRates.Add(h2);

      RelinkableHandle <DefaultProbabilityTermStructure> probabilityCurve =
         new RelinkableHandle <DefaultProbabilityTermStructure>();
      probabilityCurve.linkTo(new InterpolatedHazardRateCurve <BackwardFlat>(dates,
                                                                             hazardRates,
                                                                             dayCounter));
      RelinkableHandle <YieldTermStructure> discountCurve =
         new RelinkableHandle <YieldTermStructure>();
      discountCurve.linkTo(new FlatForward(today, 0.03, new Actual360()));

      // Common CDS conventions; the issue date lies six months in the past.
      Frequency frequency = Frequency.Semiannual;
      BusinessDayConvention convention = BusinessDayConvention.ModifiedFollowing;
      Date issueDate = calendar.advance(today, -6, TimeUnit.Months);
      double fixedRate = 0.0120;
      DayCounter cdsDayCount = new Actual360();
      double notional = 10000.0;
      double recoveryRate = 0.4;

      // Implied rate of the previous (shorter) maturity; null on first pass.
      double? latestRate = null;
      for (int n = 6; n <= 10; ++n)
      {
         Date maturity = calendar.advance(issueDate, n, TimeUnit.Years);
         Schedule schedule = new Schedule(issueDate, maturity, new Period(frequency), calendar,
                                          convention, convention,
                                          DateGeneration.Rule.Forward, false);

         CreditDefaultSwap cds = new CreditDefaultSwap(Protection.Side.Seller, notional, fixedRate,
                                                       schedule, convention, cdsDayCount, true, true);
         cds.setPricingEngine(new MidPointCdsEngine(probabilityCurve, recoveryRate, discountCurve));

         double NPV = cds.NPV();
         double flatRate = cds.impliedHazardRate(NPV, discountCurve, dayCounter, recoveryRate);

         // (a) the implied flat rate must lie between the two curve levels
         if (flatRate < h1 || flatRate > h2)
         {
            QAssert.Fail("implied hazard rate outside expected range\n"
                         + " maturity: " + n + " years\n"
                         + " expected minimum: " + h1 + "\n"
                         + " expected maximum: " + h2 + "\n"
                         + " implied rate: " + flatRate);
         }

         // (b) monotonicity in maturity (skipped on the first iteration)
         if (n > 6 && flatRate < latestRate)
         {
            QAssert.Fail("implied hazard rate decreasing with swap maturity\n"
                         + " maturity: " + n + " years\n"
                         + " previous rate: " + latestRate + "\n"
                         + " implied rate: " + flatRate);
         }
         latestRate = flatRate;

         // (c) round-trip: a flat curve at the implied rate must reprice the CDS
         RelinkableHandle <DefaultProbabilityTermStructure> probability =
            new RelinkableHandle <DefaultProbabilityTermStructure>();
         probability.linkTo(new FlatHazardRate(today,
                                               new Handle <Quote>(new SimpleQuote(flatRate)),
                                               dayCounter));

         CreditDefaultSwap cds2 = new CreditDefaultSwap(Protection.Side.Seller, notional, fixedRate,
                                                        schedule, convention, cdsDayCount, true, true);
         cds2.setPricingEngine(new MidPointCdsEngine(probability, recoveryRate, discountCurve));

         double NPV2 = cds2.NPV();
         double tolerance = 1.0;
         if (Math.Abs(NPV - NPV2) > tolerance)
         {
            QAssert.Fail("failed to reproduce NPV with implied rate\n"
                         + " expected: " + NPV + "\n"
                         + " calculated: " + NPV2);
         }
      }
   }
}
public void testHalton()
{
   //("Testing Halton sequences...");
   //
   // Checks: the requested dimensionality is honoured; the first two
   // dimensions reproduce the van der Corput sequences in bases 2 and 3;
   // and per-cycle sample means converge to 0.5.
   List <double> point;
   double tolerance = 1.0e-15;

   // testing "high" dimensionality
   int dimensionality = (int)SobolRsg.PPMT_MAX_DIM;
   HaltonRsg rsg = new HaltonRsg(dimensionality, 0, false, false);
   int points = 100, i, k;
   for (i = 0; i < points; i++)
   {
      point = rsg.nextSequence().value;
      if (point.Count != dimensionality)
      {
         QAssert.Fail("Halton sequence generator returns " +
                      " a sequence of wrong dimensionality: " + point.Count
                      + " instead of " + dimensionality);
      }
   }

   // testing first dimension (van der Corput sequence, base 2)
   double[] vanderCorputSequenceModuloTwo =
   {
      // first cycle (zero excluded)
      0.50000,
      // second cycle
      0.25000, 0.75000,
      // third cycle
      0.12500, 0.62500, 0.37500, 0.87500,
      // fourth cycle
      0.06250, 0.56250, 0.31250, 0.81250, 0.18750, 0.68750, 0.43750, 0.93750,
      // fifth cycle
      0.03125, 0.53125, 0.28125, 0.78125, 0.15625, 0.65625, 0.40625, 0.90625,
      0.09375, 0.59375, 0.34375, 0.84375, 0.21875, 0.71875, 0.46875, 0.96875,
   };
   dimensionality = 1;
   rsg = new HaltonRsg(dimensionality, 0, false, false);
   points = (int)(Math.Pow(2.0, 5)) - 1; // five cycles
   for (i = 0; i < points; i++)
   {
      point = rsg.nextSequence().value;
      double error = Math.Abs(point[0] - vanderCorputSequenceModuloTwo[i]);
      if (error > tolerance)
      {
         QAssert.Fail((i + 1) + " draw ("
                      + point[0] + ") in 1-D Halton sequence is not in the "
                      + "van der Corput sequence modulo two: "
                      + "it should have been " + vanderCorputSequenceModuloTwo[i]
                      //+ QL_SCIENTIFIC
                      + " (error = " + error + ")");
      }
   }

   // testing second dimension (van der Corput sequence, base 3)
   double[] vanderCorputSequenceModuloThree =
   {
      // first cycle (zero excluded)
      1.0 / 3, 2.0 / 3,
      // second cycle
      1.0 / 9, 4.0 / 9, 7.0 / 9, 2.0 / 9, 5.0 / 9, 8.0 / 9,
      // third cycle
      1.0 / 27, 10.0 / 27, 19.0 / 27, 4.0 / 27, 13.0 / 27, 22.0 / 27,
      7.0 / 27, 16.0 / 27, 25.0 / 27, 2.0 / 27, 11.0 / 27, 20.0 / 27,
      5.0 / 27, 14.0 / 27, 23.0 / 27, 8.0 / 27, 17.0 / 27, 26.0 / 27
   };
   dimensionality = 2;
   rsg = new HaltonRsg(dimensionality, 0, false, false);
   points = (int)(Math.Pow(3.0, 3)) - 1; // three cycles of the higher dimension
   for (i = 0; i < points; i++)
   {
      point = rsg.nextSequence().value;
      double error = Math.Abs(point[0] - vanderCorputSequenceModuloTwo[i]);
      if (error > tolerance)
      {
         // BUG FIX: "First component of " + i + 1 concatenated i and 1 as two
         // separate numbers (e.g. "51" for draw 6); (i + 1) is the draw index.
         QAssert.Fail("First component of " + (i + 1)
                      + " draw (" + point[0]
                      + ") in 2-D Halton sequence is not in the "
                      + "van der Corput sequence modulo two: "
                      + "it should have been " + vanderCorputSequenceModuloTwo[i]
                      //+ QL_SCIENTIFIC
                      + " (error = " + error + ")");
      }
      error = Math.Abs(point[1] - vanderCorputSequenceModuloThree[i]);
      if (error > tolerance)
      {
         // BUG FIX: same parenthesization fix as above.
         QAssert.Fail("Second component of " + (i + 1)
                      + " draw (" + point[1]
                      + ") in 2-D Halton sequence is not in the "
                      + "van der Corput sequence modulo three: "
                      + "it should have been " + vanderCorputSequenceModuloThree[i]
                      //+ QL_SCIENTIFIC
                      + " (error = " + error + ")");
      }
   }

   // testing homogeneity properties (first dimension, base-2 cycles)
   dimensionality = 33;
   rsg = new HaltonRsg(dimensionality, 0, false, false);
   SequenceStatistics stat = new SequenceStatistics(dimensionality);
   List <double> mean; //, stdev, variance, skewness, kurtosis;
   k = 0;
   int j;
   for (j = 1; j < 5; j++)
   {
      points = (int)(Math.Pow(2.0, j)) - 1; // base 2
      for (; k < points; k++)
      {
         point = rsg.nextSequence().value;
         stat.add(point);
      }
      mean = stat.mean();
      double error = Math.Abs(mean[0] - 0.5);
      if (error > tolerance)
      {
         // BUG FIX: (j + 1) parenthesized -- previously printed "j" then "1".
         QAssert.Fail("First dimension mean (" + mean[0]
                      + ") at the end of the " + (j + 1)
                      + " cycle in Halton sequence is not " + 0.5
                      //+ QL_SCIENTIFIC
                      + " (error = " + error + ")");
      }
   }

   // reset generator and gaussianstatistics; second dimension, base-3 cycles
   rsg = new HaltonRsg(dimensionality, 0, false, false);
   stat.reset(dimensionality);
   k = 0;
   for (j = 1; j < 3; j++)
   {
      points = (int)(Math.Pow(3.0, j)) - 1; // base 3
      for (; k < points; k++)
      {
         point = rsg.nextSequence().value;
         stat.add(point);
      }
      mean = stat.mean();
      double error = Math.Abs(mean[1] - 0.5);
      if (error > tolerance)
      {
         // BUG FIX: same (j + 1) parenthesization fix.
         QAssert.Fail("Second dimension mean (" + mean[1]
                      + ") at the end of the " + (j + 1)
                      + " cycle in Halton sequence is not " + 0.5
                      //+ QL_SCIENTIFIC
                      + " (error = " + error + ")");
      }
   }
}
public void testFairUpfront()
{
   // Testing fair-upfront calculation for credit-default swaps...
   //
   // For a seed CDS quoted with a given upfront, a second CDS quoted at the
   // computed fair upfront must price to (numerically) zero NPV.
   using (SavedSettings backup = new SavedSettings())
   {
      // Market setup: flat hazard-rate and flat discount curves as of today.
      Calendar calendar = new TARGET();
      Date today = calendar.adjust(Date.Today);
      Settings.setEvaluationDate(today);

      Handle <Quote> hazardRate = new Handle <Quote>(new SimpleQuote(0.01234));
      RelinkableHandle <DefaultProbabilityTermStructure> probabilityCurve =
         new RelinkableHandle <DefaultProbabilityTermStructure>();
      probabilityCurve.linkTo(new FlatHazardRate(0, calendar, hazardRate, new Actual360()));
      RelinkableHandle <YieldTermStructure> discountCurve =
         new RelinkableHandle <YieldTermStructure>();
      discountCurve.linkTo(new FlatForward(today, 0.06, new Actual360()));

      // A quarterly 10y schedule built on the TwentiethIMM rule.
      Date issueDate = today;
      Date maturity = calendar.advance(issueDate, 10, TimeUnit.Years);
      BusinessDayConvention convention = BusinessDayConvention.Following;
      Schedule schedule = new MakeSchedule().from(issueDate)
                          .to(maturity)
                          .withFrequency(Frequency.Quarterly)
                          .withCalendar(calendar)
                          .withTerminationDateConvention(convention)
                          .withRule(DateGeneration.Rule.TwentiethIMM).value();

      // Contract terms shared by every CDS built below.
      double fixedRate = 0.05;
      DayCounter dayCount = new Actual360();
      double notional = 10000.0;
      double recoveryRate = 0.4;
      IPricingEngine engine = new MidPointCdsEngine(probabilityCurve, recoveryRate, discountCurve, true);
      double tolerance = 1e-10;

      // Run the round-trip twice: once seeded with a non-null upfront,
      // once seeded with a null upfront.
      foreach (double upfront in new double[] { 0.001, 0.0 })
      {
         CreditDefaultSwap seedCds = new CreditDefaultSwap(Protection.Side.Seller, notional,
                                                           upfront, fixedRate,
                                                           schedule, convention, dayCount, true, true);
         seedCds.setPricingEngine(engine);
         double fairUpfront = seedCds.fairUpfront();

         CreditDefaultSwap repricedCds = new CreditDefaultSwap(Protection.Side.Seller, notional,
                                                               fairUpfront, fixedRate,
                                                               schedule, convention, dayCount, true, true);
         repricedCds.setPricingEngine(engine);
         double fairNPV = repricedCds.NPV();

         if (Math.Abs(fairNPV) > tolerance)
         {
            QAssert.Fail(
               "Failed to reproduce null NPV with calculated fair upfront\n"
               + " calculated upfront: " + fairUpfront + "\n"
               + " calculated NPV: " + fairNPV);
         }
      }
   }
}
public void testMonteCarloCapletPricing()
{
   // Testing caplet LMM Monte-Carlo caplet pricing
   //
   // Prices plain caplets (4% cap rate) and ratchet caps (25bp spread over the
   // previous fixing) under one- and three-factor libor market models and
   // compares the MC means against reference NPVs within the MC error estimate.
   /* factor loadings are taken from Hull & White article
    * plus extra normalisation to get orthogonal eigenvectors
    * http://www.rotman.utoronto.ca/~amackay/fin/libormktmodel2.pdf */
   double[] compValues = { 0.85549771, 0.46707264, 0.22353259,
                           0.91915359, 0.37716089, 0.11360610,
                           0.96438280, 0.26413316, -0.01412414,
                           0.97939148, 0.13492952, -0.15028753,
                           0.95970595, -0.00000000, -0.28100621,
                           0.97939148, -0.13492952, -0.15028753,
                           0.96438280, -0.26413316, -0.01412414,
                           0.91915359, -0.37716089, 0.11360610,
                           0.85549771, -0.46707264, 0.22353259
                         };

   // Copy the 9x3 factor loadings into a Matrix.
   Matrix volaComp = new Matrix(9, 3);
   List <double> lcompValues = new InitializedList <double>(27, 0);
   List <double> ltemp = new InitializedList <double>(3, 0);
   lcompValues = compValues.ToList();
   //std::copy(compValues, compValues+9*3, volaComp.begin());
   for (int i = 0; i < 9; i++)
   {
      ltemp = lcompValues.GetRange(3 * i, 3);
      for (int j = 0; j < 3; j++)
      {
         volaComp[i, j] = ltemp[j];
      }
   }

   LiborForwardModelProcess process1 = makeProcess();
   LiborForwardModelProcess process2 = makeProcess(volaComp);

   // Map each fixing time onto the refined simulation grid.
   List <double> tmp = process1.fixingTimes();
   TimeGrid grid = new TimeGrid(tmp, tmp.Count, 12);
   List <int> location = new List <int>();
   for (int i = 0; i < tmp.Count; ++i)
   {
      location.Add(grid.index(tmp[i]));
   }

   // set-up a small Monte-Carlo simulation to price caplets
   // and ratchet caps using a one- and a three factor libor market model
   ulong seed = 42;
   LowDiscrepancy.icInstance = new InverseCumulativeNormal();
   IRNG rsg1 = (IRNG) new LowDiscrepancy().make_sequence_generator(
      process1.factors() * (grid.size() - 1), seed);
   IRNG rsg2 = (IRNG) new LowDiscrepancy().make_sequence_generator(
      process2.factors() * (grid.size() - 1), seed);

   MultiPathGenerator <IRNG> generator1 = new MultiPathGenerator <IRNG>(process1, grid, rsg1, false);
   MultiPathGenerator <IRNG> generator2 = new MultiPathGenerator <IRNG>(process2, grid, rsg2, false);

   const int nrTrials = 250000; // renamed from "nrTrails" (typo)
   List <GeneralStatistics> stat1 = new InitializedList <GeneralStatistics>(process1.size());
   List <GeneralStatistics> stat2 = new InitializedList <GeneralStatistics>(process2.size());
   List <GeneralStatistics> stat3 = new InitializedList <GeneralStatistics>(process2.size() - 1);
   for (int i = 0; i < nrTrials; ++i)
   {
      Sample <IPath> path1 = generator1.next();
      Sample <IPath> path2 = generator2.next();
      MultiPath value1 = path1.value as MultiPath;
      Utils.QL_REQUIRE(value1 != null, () => "Invalid Path");
      MultiPath value2 = path2.value as MultiPath;
      Utils.QL_REQUIRE(value2 != null, () => "Invalid Path");

      // len: defined outside this method (class scope) -- the number of rates.
      List <double> rates1 = new InitializedList <double>(len);
      List <double> rates2 = new InitializedList <double>(len);
      for (int j = 0; j < process1.size(); ++j)
      {
         rates1[j] = value1[j][location[j]];
         rates2[j] = value2[j][location[j]];
      }

      List <double> dis1 = process1.discountBond(rates1);
      List <double> dis2 = process2.discountBond(rates2);

      for (int k = 0; k < process1.size(); ++k)
      {
         double accrualPeriod = process1.accrualEndTimes()[k]
                                - process1.accrualStartTimes()[k];
         // caplet payoff function, cap rate at 4%
         double payoff1 = Math.Max(rates1[k] - 0.04, 0.0) * accrualPeriod;
         double payoff2 = Math.Max(rates2[k] - 0.04, 0.0) * accrualPeriod;
         stat1[k].add(dis1[k] * payoff1);
         stat2[k].add(dis2[k] * payoff2);

         if (k != 0)
         {
            // ratchet cap payoff function
            double payoff3 = Math.Max(rates2[k] - (rates2[k - 1] + 0.0025), 0.0)
                             * accrualPeriod;
            stat3[k - 1].add(dis2[k] * payoff3);
         }
      }
   }

   // Reference values for the caplet and ratchet-cap NPVs.
   double[] capletNpv = { 0.000000000000, 0.000002841629, 0.002533279333,
                          0.009577143571, 0.017746502618, 0.025216116835,
                          0.031608230268, 0.036645683881, 0.039792254012,
                          0.041829864365
                        };
   double[] ratchetNpv = { 0.0082644895, 0.0082754754, 0.0082159966,
                           0.0082982822, 0.0083803357, 0.0084366961,
                           0.0084173270, 0.0081803406, 0.0079533814
                         };

   for (int k = 0; k < process1.size(); ++k)
   {
      double calculated1 = stat1[k].mean();
      double tolerance1 = stat1[k].errorEstimate();
      double expected = capletNpv[k];
      if (Math.Abs(calculated1 - expected) > tolerance1)
      {
         QAssert.Fail("Failed to reproduce expected caplet NPV"
                      + "\n calculated: " + calculated1
                      + "\n error int: " + tolerance1
                      + "\n expected: " + expected);
      }

      double calculated2 = stat2[k].mean();
      double tolerance2 = stat2[k].errorEstimate();
      if (Math.Abs(calculated2 - expected) > tolerance2)
      {
         QAssert.Fail("Failed to reproduce expected caplet NPV"
                      + "\n calculated: " + calculated2
                      + "\n error int: " + tolerance2
                      + "\n expected: " + expected);
      }

      if (k != 0)
      {
         double calculated3 = stat3[k - 1].mean();
         double tolerance3 = stat3[k - 1].errorEstimate();
         expected = ratchetNpv[k - 1];
         double refError = 1e-5; // 1e-5. error bars of the reference values
         if (Math.Abs(calculated3 - expected) > tolerance3 + refError)
         {
            // BUG FIX: the message previously concatenated tolerance3 and
            // refError as two separate strings instead of printing their sum.
            QAssert.Fail("Failed to reproduce expected caplet NPV"
                         + "\n calculated: " + calculated3
                         + "\n error int: " + (tolerance3 + refError)
                         + "\n expected: " + expected);
         }
      }
   }
}
public void RiskStatisticsTest() { // ("Testing risk measures..."); IncrementalGaussianStatistics igs = new IncrementalGaussianStatistics(); RiskStatistics s = new RiskStatistics(); double[] averages = { -100.0, -1.0, 0.0, 1.0, 100.0 }; double[] sigmas = { 0.1, 1.0, 100.0 }; int i, j, k, N; N = (int)Math.Pow(2, 16) - 1; double dataMin, dataMax; List <double> data = new InitializedList <double>(N), weights = new InitializedList <double>(N); for (i = 0; i < averages.Length; i++) { for (j = 0; j < sigmas.Length; j++) { NormalDistribution normal = new NormalDistribution(averages[i], sigmas[j]); CumulativeNormalDistribution cumulative = new CumulativeNormalDistribution(averages[i], sigmas[j]); InverseCumulativeNormal inverseCum = new InverseCumulativeNormal(averages[i], sigmas[j]); SobolRsg rng = new SobolRsg(1); dataMin = double.MaxValue; dataMax = double.MinValue; for (k = 0; k < N; k++) { data[k] = inverseCum.value(rng.nextSequence().value[0]); dataMin = Math.Min(dataMin, data[k]); dataMax = Math.Max(dataMax, data[k]); weights[k] = 1.0; } igs.addSequence(data, weights); s.addSequence(data, weights); // checks double calculated, expected; double tolerance; if (igs.samples() != N) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong number of samples\n" + " calculated: " + igs.samples() + "\n" + " expected: " + N); } if (s.samples() != N) { QAssert.Fail("RiskStatistics: wrong number of samples\n" + " calculated: " + s.samples() + "\n" + " expected: " + N); } // weightSum() tolerance = 1e-10; expected = weights.Sum(); calculated = igs.weightSum(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong sum of weights\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.weightSum(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong sum of weights\n" + " calculated: " + calculated + "\n" + " expected: " + 
expected + "\n" + " tolerance: " + tolerance); } // min tolerance = 1e-12; expected = dataMin; calculated = igs.min(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong minimum value\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.min(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: " + "wrong minimum value\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // max expected = dataMax; calculated = igs.max(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong maximum value\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.max(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: " + "wrong maximum value\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // mean expected = averages[i]; tolerance = (expected == 0.0 ? 
1.0e-13 : Math.Abs(expected) * 1.0e-13); calculated = igs.mean(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong mean value" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.mean(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong mean value" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // variance expected = sigmas[j] * sigmas[j]; tolerance = expected * 1.0e-1; calculated = igs.variance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong variance" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.variance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong variance" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // standardDeviation expected = sigmas[j]; tolerance = expected * 1.0e-1; calculated = igs.standardDeviation(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong standard deviation" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.standardDeviation(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong standard deviation" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // missing 
errorEstimate() test // skewness expected = 0.0; tolerance = 1.0e-4; calculated = igs.skewness(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong skewness" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.skewness(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong skewness" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // kurtosis expected = 0.0; tolerance = 1.0e-1; calculated = igs.kurtosis(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong kurtosis" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.kurtosis(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong kurtosis" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // percentile expected = averages[i]; tolerance = (expected == 0.0 ? 
1.0e-3 : Math.Abs(expected * 1.0e-3)); calculated = igs.gaussianPercentile(0.5); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian percentile" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianPercentile(0.5); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian percentile" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.percentile(0.5); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong percentile" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // potential upside double upper_tail = averages[i] + 2.0 * sigmas[j], lower_tail = averages[i] - 2.0 * sigmas[j]; double twoSigma = cumulative.value(upper_tail); expected = Math.Max(upper_tail, 0.0); tolerance = (expected == 0.0 ? 
1.0e-3 : Math.Abs(expected * 1.0e-3)); calculated = igs.gaussianPotentialUpside(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian potential upside" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianPotentialUpside(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian potential upside" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.potentialUpside(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong potential upside" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // just to check that GaussianStatistics<StatsHolder> does work StatsHolder h = new StatsHolder(s.mean(), s.standardDeviation()); GenericGaussianStatistics <StatsHolder> test = new GenericGaussianStatistics <StatsHolder>(h); expected = s.gaussianPotentialUpside(twoSigma); calculated = test.gaussianPotentialUpside(twoSigma); if (calculated != expected) { QAssert.Fail("GenericGaussianStatistics<StatsHolder> fails" + "\n calculated: " + calculated + "\n expected: " + expected); } // value-at-risk expected = -Math.Min(lower_tail, 0.0); tolerance = (expected == 0.0 ? 
1.0e-3 : Math.Abs(expected * 1.0e-3)); calculated = igs.gaussianValueAtRisk(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian value-at-risk" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianValueAtRisk(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian value-at-risk" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.valueAtRisk(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong value-at-risk" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } if (averages[i] > 0.0 && sigmas[j] < averages[i]) { // no data will miss the targets: // skip the rest of this iteration igs.reset(); s.reset(); continue; } // expected shortfall expected = -Math.Min(averages[i] - sigmas[j] * sigmas[j] * normal.value(lower_tail) / (1.0 - twoSigma), 0.0); tolerance = (expected == 0.0 ? 
1.0e-4 : Math.Abs(expected) * 1.0e-2); calculated = igs.gaussianExpectedShortfall(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian expected shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianExpectedShortfall(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian expected shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.expectedShortfall(twoSigma); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong expected shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // shortfall expected = 0.5; tolerance = (expected == 0.0 ? 
1.0e-3 : Math.Abs(expected * 1.0e-3)); calculated = igs.gaussianShortfall(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianShortfall(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.shortfall(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // average shortfall expected = sigmas[j] / Math.Sqrt(2.0 * Const.M_PI) * 2.0; tolerance = expected * 1.0e-3; calculated = igs.gaussianAverageShortfall(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian average shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianAverageShortfall(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian average shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.averageShortfall(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong average shortfall" + " for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + 
calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // regret expected = sigmas[j] * sigmas[j]; tolerance = expected * 1.0e-1; calculated = igs.gaussianRegret(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian regret(" + averages[i] + ") " + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianRegret(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: " + "wrong Gaussian regret(" + averages[i] + ") " + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.regret(averages[i]); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: " + "wrong regret(" + averages[i] + ") " + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // downsideVariance expected = s.downsideVariance(); tolerance = (expected == 0.0 ? 
1.0e-3 : Math.Abs(expected * 1.0e-3)); calculated = igs.downsideVariance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong downside variance" + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = igs.gaussianDownsideVariance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian downside variance" + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } // downsideVariance if (averages[i] == 0.0) { expected = sigmas[j] * sigmas[j]; tolerance = expected * 1.0e-3; calculated = igs.downsideVariance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong downside variance" + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = igs.gaussianDownsideVariance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("IncrementalGaussianStatistics: " + "wrong Gaussian downside variance" + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.downsideVariance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong downside variance" + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } calculated = s.gaussianDownsideVariance(); if (Math.Abs(calculated - expected) > tolerance) { QAssert.Fail("RiskStatistics: wrong Gaussian downside variance" + "for N(" + averages[i] + ", " + sigmas[j] + ")\n" + " calculated: " + 
calculated + "\n" + " expected: " + expected + "\n" + " tolerance: " + tolerance); } } igs.reset(); s.reset(); } } }
public void testActualActualWithSemiannualSchedule()
{
   // Testing actual/actual (ISMA) when the reference periods are not defined
   // by the coupon dates themselves but must be inferred from an explicit
   // semiannual schedule.
   Calendar calendar = new UnitedStates();
   Date startDate = new Date(10, Month.January, 2017);
   Date firstCouponDate = new Date(31, Month.August, 2017);
   Date quasiCouponDate = new Date(28, Month.February, 2017);
   Date priorQuasiCouponDate = new Date(31, Month.August, 2016);

   Schedule schedule = new MakeSchedule()
                       .from(startDate)
                       .withFirstDate(firstCouponDate)
                       .to(new Date(28, Month.February, 2026))
                       .withFrequency(Frequency.Semiannual)
                       .withCalendar(calendar)
                       .withConvention(BusinessDayConvention.Unadjusted)
                       .backwards().endOfMonth(true).value();

   DayCounter dayCounter = new ActualActual(ActualActual.Convention.ISMA, schedule);
   DayCounter dayCounterNoSchedule = new ActualActual(ActualActual.Convention.ISMA);

   Date refStart = schedule.date(1);
   Date refEnd = schedule.date(2);

   // Degenerate zero-length periods must give a zero year fraction,
   // with and without an attached schedule or explicit reference period.
   QAssert.IsTrue(dayCounter.yearFraction(refStart, refStart).IsEqual(0.0),
                  "This should be zero.");
   QAssert.IsTrue(dayCounterNoSchedule.yearFraction(refStart, refStart).IsEqual(0.0),
                  "This should be zero");
   QAssert.IsTrue(dayCounterNoSchedule.yearFraction(refStart, refStart, refStart, refStart).IsEqual(0.0),
                  "This should be zero");

   // A full semiannual reference period is exactly half a year.
   QAssert.IsTrue(dayCounter.yearFraction(refStart, refEnd).IsEqual(0.5),
                  "This should be exact using schedule; " +
                  refStart + " to " + refEnd + "Should be 0.5");
   QAssert.IsTrue(dayCounterNoSchedule.yearFraction(refStart, refEnd, refStart, refEnd).IsEqual(0.5),
                  "This should be exact for explicit reference periods with no schedule");

   // With a schedule attached, the day counter must find the same reference
   // period on its own as when the period is passed in explicitly.
   for (Date probe = schedule.date(1); probe < refEnd;
        probe = calendar.advance(probe, 1, TimeUnit.Days))
   {
      double diff = dayCounter.yearFraction(probe, refEnd, refStart, refEnd)
                    - dayCounter.yearFraction(probe, refEnd);
      if (Math.Abs(diff) > 1.0e-10)
      {
         QAssert.Fail("Failed to correctly use the schedule to find the reference period for Act/Act");
      }
   }

   // Long first coupon: half of a regular period plus the stub, measured
   // against the quasi-coupon period containing it.
   double calculatedYearFraction = dayCounter.yearFraction(startDate, firstCouponDate);
   double expectedYearFraction = 0.5 +
      ((double)dayCounter.dayCount(startDate, quasiCouponDate)) /
      (2 * dayCounter.dayCount(priorQuasiCouponDate, quasiCouponDate));
   QAssert.IsTrue(Math.Abs(calculatedYearFraction - expectedYearFraction) < 1.0e-10,
                  "Failed to compute the expected year fraction " +
                  "\n expected: " + expectedYearFraction +
                  "\n calculated: " + calculatedYearFraction);

   // Year fractions spanning several reference periods must agree with a
   // direct day-count computation over the same (non-EOM) schedule.
   schedule = new MakeSchedule()
              .from(new Date(10, Month.January, 2017))
              .withFirstDate(new Date(31, Month.August, 2017))
              .to(new Date(28, Month.February, 2026))
              .withFrequency(Frequency.Semiannual)
              .withCalendar(calendar)
              .withConvention(BusinessDayConvention.Unadjusted)
              .backwards().endOfMonth(false).value();
   dayCounter = new ActualActual(ActualActual.Convention.ISMA, schedule);

   Date periodStartDate = schedule.date(1);
   // The schedule is not modified below, so the loop bound is invariant.
   Date lastTestableDate = schedule.date(schedule.size() - 2);
   for (Date periodEndDate = schedule.date(2); periodEndDate < lastTestableDate;
        periodEndDate = calendar.advance(periodEndDate, 1, TimeUnit.Days))
   {
      double expected = actualActualDaycountComputation(schedule, periodStartDate, periodEndDate);
      double calculated = dayCounter.yearFraction(periodStartDate, periodEndDate);
      if (Math.Abs(expected - calculated) > 1e-8)
      {
         QAssert.Fail("Failed to compute the correct year fraction " +
                      "given a schedule: " + periodStartDate + " to " + periodEndDate +
                      "\n expected: " + expected + " calculated: " + calculated);
      }
   }
}
public void testCalibration()
{
   // Testing calibration of a Libor forward model against cap and
   // swaption market volatilities.
   const int size = 14;
   const double tolerance = 8e-3;

   // Market cap volatilities, one per cap maturity starting at 2 periods.
   double[] capVols = { 0.145708, 0.158465, 0.166248, 0.168672, 0.169007,
                        0.167956, 0.166261, 0.164239, 0.162082, 0.159923,
                        0.157781, 0.155745, 0.153776, 0.151950, 0.150189,
                        0.148582, 0.147034, 0.145598, 0.144248 };

   // Market swaption volatilities, consumed row by row below.
   double[] swaptionVols = { 0.170595, 0.166844, 0.158306, 0.147444, 0.136930, 0.126833, 0.118135,
                             0.175963, 0.166359, 0.155203, 0.143712, 0.132769, 0.122947, 0.114310,
                             0.174455, 0.162265, 0.150539, 0.138734, 0.128215, 0.118470, 0.110540,
                             0.169780, 0.156860, 0.144821, 0.133537, 0.123167, 0.114363, 0.106500,
                             0.164521, 0.151223, 0.139670, 0.128632, 0.119123, 0.110330, 0.103114,
                             0.158956, 0.146036, 0.134555, 0.124393, 0.115038, 0.106996, 0.100064 };

   IborIndex index = makeIndex();
   LiborForwardModelProcess process = new LiborForwardModelProcess(size, index);
   Handle<YieldTermStructure> termStructure = index.forwardingTermStructure();

   // Model set-up: extended linear-exponential volatility and
   // linear-exponential correlation.
   LmVolatilityModel volaModel = new LmExtLinearExponentialVolModel(process.fixingTimes(),
                                                                   0.5, 0.6, 0.1, 0.1);
   LmCorrelationModel corrModel = new LmLinearExponentialCorrelationModel(size, 0.5, 0.8);
   LiborForwardModel model = new LiborForwardModel(process, volaModel, corrModel);

   DayCounter dayCounter = index.forwardingTermStructure().link.dayCounter();

   // Build the calibration basket: a cap for every maturity, plus a block
   // of swaptions for the shorter maturities.
   List<CalibrationHelper> helpers = new List<CalibrationHelper>();
   int swapVolIndex = 0;
   for (int i = 2; i < size; ++i)
   {
      Period maturity = i * index.tenor();
      Handle<Quote> capVol = new Handle<Quote>(new SimpleQuote(capVols[i - 2]));

      CalibrationHelper capHelper = new CapHelper(maturity, capVol, index, Frequency.Annual,
                                                  index.dayCounter(), true, termStructure,
                                                  CalibrationHelper.CalibrationErrorType.ImpliedVolError);
      capHelper.setPricingEngine(new AnalyticCapFloorEngine(model, termStructure));
      helpers.Add(capHelper);

      if (i <= size / 2)
      {
         // add a few swaptions to test swaption calibration as well
         for (int j = 1; j <= size / 2; ++j)
         {
            Period swapLength = j * index.tenor();
            Handle<Quote> swaptionVol = new Handle<Quote>(
               new SimpleQuote(swaptionVols[swapVolIndex++]));

            CalibrationHelper swaptionHelper =
               new SwaptionHelper(maturity, swapLength, swaptionVol, index,
                                  index.tenor(), dayCounter, index.dayCounter(),
                                  termStructure,
                                  CalibrationHelper.CalibrationErrorType.ImpliedVolError);
            swaptionHelper.setPricingEngine(new LfmSwaptionEngine(model, termStructure));
            helpers.Add(swaptionHelper);
         }
      }
   }

   LevenbergMarquardt optimizer = new LevenbergMarquardt(1e-6, 1e-6, 1e-6);
   model.calibrate(helpers, optimizer,
                   new EndCriteria(2000, 100, 1e-6, 1e-6, 1e-6),
                   new Constraint(), new List<double>());

   // Measure the calibration error as the RMS over all helpers.
   double sumOfSquares = 0.0;
   foreach (CalibrationHelper helper in helpers)
   {
      double diff = helper.calibrationError();
      sumOfSquares += diff * diff;
   }

   if (Math.Sqrt(sumOfSquares) > tolerance)
   {
      QAssert.Fail("Failed to calibrate libor forward model" +
                   "\n calculated diff: " + Math.Sqrt(sumOfSquares) +
                   "\n expected : smaller than " + tolerance);
   }
}
public void testSpreadedCube()
{
   // Testing that a spreaded swaption volatility cube shifts every
   // volatility by exactly the spread and propagates notifications.
   CommonVars vars = new CommonVars();

   int nPairs = vars.cube.tenors.options.Count * vars.cube.tenors.swaps.Count;

   // SABR-style parameter guesses, one 4-tuple per (option, swap) pair.
   List<List<Handle<Quote>>> parametersGuess =
      new InitializedList<List<Handle<Quote>>>(nPairs);
   for (int i = 0; i < nPairs; i++)
   {
      parametersGuess[i] = new InitializedList<Handle<Quote>>(4);
      parametersGuess[i][0] = new Handle<Quote>(new SimpleQuote(0.2));
      parametersGuess[i][1] = new Handle<Quote>(new SimpleQuote(0.5));
      parametersGuess[i][2] = new Handle<Quote>(new SimpleQuote(0.4));
      parametersGuess[i][3] = new Handle<Quote>(new SimpleQuote(0.0));
   }
   List<bool> isParameterFixed = new InitializedList<bool>(4, false);

   Handle<SwaptionVolatilityStructure> volCube =
      new Handle<SwaptionVolatilityStructure>(
         new SwaptionVolCube1x(vars.atmVolMatrix,
                               vars.cube.tenors.options,
                               vars.cube.tenors.swaps,
                               vars.cube.strikeSpreads,
                               vars.cube.volSpreadsHandle,
                               vars.swapIndexBase,
                               vars.shortSwapIndexBase,
                               vars.vegaWeighedSmileFit,
                               parametersGuess,
                               isParameterFixed,
                               true));

   SimpleQuote spread = new SimpleQuote(0.0001);
   Handle<Quote> spreadHandle = new Handle<Quote>(spread);
   SwaptionVolatilityStructure spreadedVolCube =
      new SpreadedSwaptionVolatility(volCube, spreadHandle);

   // Strike grid: 1% to 99% in 1% steps.
   List<double> strikes = new List<double>();
   for (int k = 1; k < 100; k++)
      strikes.Add(k * .01);

   for (int i = 0; i < vars.cube.tenors.options.Count; i++)
   {
      for (int j = 0; j < vars.cube.tenors.swaps.Count; j++)
      {
         SmileSection baseSmile =
            volCube.link.smileSection(vars.cube.tenors.options[i], vars.cube.tenors.swaps[j]);
         SmileSection spreadedSmile =
            spreadedVolCube.smileSection(vars.cube.tenors.options[i], vars.cube.tenors.swaps[j]);

         foreach (double strike in strikes)
         {
            // The volatility() accessor must reflect the spread exactly.
            double diff = spreadedVolCube.volatility(vars.cube.tenors.options[i],
                                                     vars.cube.tenors.swaps[j], strike)
                          - volCube.link.volatility(vars.cube.tenors.options[i],
                                                    vars.cube.tenors.swaps[j], strike);
            if (Math.Abs(diff - spread.value()) > 1e-16)
            {
               QAssert.Fail("\ndiff!=spread in volatility method:" +
                            "\nexpiry time = " + vars.cube.tenors.options[i] +
                            "\nswap length = " + vars.cube.tenors.swaps[j] +
                            "\n atm strike = " + (strike) +
                            "\ndiff = " + diff +
                            "\nspread = " + spread.value());
            }

            // So must the smile sections.
            diff = spreadedSmile.volatility(strike) - baseSmile.volatility(strike);
            if (Math.Abs(diff - spread.value()) > 1e-16)
            {
               QAssert.Fail("\ndiff!=spread in smile section method:" +
                            "\nexpiry time = " + vars.cube.tenors.options[i] +
                            "\nswap length = " + vars.cube.tenors.swaps[j] +
                            "\n atm strike = " + (strike) +
                            "\ndiff = " + diff +
                            "\nspread = " + spread.value());
            }
         }
      }
   }

   // Observability: updates to the underlying cube and to the spread quote
   // must both be forwarded to observers of the spreaded structure.
   Flag f = new Flag();
   spreadedVolCube.registerWith(f.update);

   volCube.link.update();
   if (!f.isUp())
      QAssert.Fail("SpreadedSwaptionVolatilityStructure does not propagate notifications");

   f.lower();
   spread.setValue(.001);
   if (!f.isUp())
      QAssert.Fail("SpreadedSwaptionVolatilityStructure does not propagate notifications");
}