public void MedianTest()
{
    double[] samples = { 1, 5, 2, 5, 1, 7, 1, 9 };
    EmpiricalDistribution target = new EmpiricalDistribution(samples);

    // The median must coincide with the 0.5 quantile (inverse CDF at p = 0.5).
    Assert.AreEqual(target.Median, target.InverseDistributionFunction(0.5));
}
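// A minimal usage sketch of the identity exercised above: for any sample set,
// EmpiricalDistribution.Median should agree with the 0.5 quantile. The sample
// values and method name below are illustrative, not part of the test suite;
// assumes the EmpiricalDistribution API used throughout this file and using System.
public static void MedianEqualsHalfQuantileSketch()
{
    double[] data = { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 };
    var dist = new EmpiricalDistribution(data);

    // Both calls read the same point off the smoothed empirical CDF.
    double median = dist.Median;
    double halfQuantile = dist.InverseDistributionFunction(0.5);

    Console.WriteLine("{0} == {1}", median, halfQuantile);
}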
public void EmpiricalDistributionConstructorTest5()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };

    // Build the distribution with an explicit (deliberately faulty) smoothing
    // parameter instead of the default rule, so the kernel-dependent values
    // (pdf, log-pdf and hazard) differ from ConstructorTest3 below.
    EmpiricalDistribution distribution = new EmpiricalDistribution(samples, FaultySmoothingRule(samples));

    double mean = distribution.Mean;                                       // 3
    double median = distribution.Median;                                   // 2.9999993064186787
    double var = distribution.Variance;                                    // 1.2941176470588236
    double chf = distribution.CumulativeHazardFunction(x: 4.2);            // 2.1972245773362191
    double cdf = distribution.DistributionFunction(x: 4.2);                // 0.88888888888888884
    double pdf = distribution.ProbabilityDensityFunction(x: 4.2);          // 0.15552784414141974
    double lpdf = distribution.LogProbabilityDensityFunction(x: 4.2);      // -1.8609305013898356
    double hf = distribution.HazardFunction(x: 4.2);                       // 1.3997505972727771
    double ccdf = distribution.ComplementaryDistributionFunction(x: 4.2);  // 0.11111111111111116
    double icdf = distribution.InverseDistributionFunction(p: cdf);        // 4.1999999999999993
    double smoothing = distribution.Smoothing;                             // 1.9144923416414432
    string str = distribution.ToString();                                  // Fn(x; S)

    Assert.AreEqual(samples, distribution.Samples);
    Assert.AreEqual(1.9144923416414432, smoothing, 1.0e-15);
    Assert.AreEqual(3.0, mean);
    Assert.AreEqual(2.9999993064186787, median);
    Assert.AreEqual(1.2941176470588236, var);
    Assert.AreEqual(2.1972245773362191, chf);
    Assert.AreEqual(0.88888888888888884, cdf);
    Assert.AreEqual(0.15552784414141974, pdf, 1e-15);
    Assert.AreEqual(-1.8609305013898356, lpdf);
    Assert.AreEqual(1.3997505972727771, hf, 1e-15);
    Assert.AreEqual(0.11111111111111116, ccdf);
    Assert.AreEqual(4.1999999999999993, icdf);
    Assert.AreEqual("Fn(x; S)", str);
}
public void EmpiricalDistributionConstructorTest3()
{
    double[] samples = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };

    // Same samples as above, but using the default smoothing rule.
    EmpiricalDistribution distribution = new EmpiricalDistribution(samples);

    double mean = distribution.Mean;                                       // 3
    double median = distribution.Median;                                   // 2.9999993064186787
    double var = distribution.Variance;                                    // 1.2941176470588236
    double chf = distribution.CumulativeHazardFunction(x: 4.2);            // 2.1972245773362191
    double cdf = distribution.DistributionFunction(x: 4.2);                // 0.88888888888888884
    double pdf = distribution.ProbabilityDensityFunction(x: 4.2);          // 0.18145628014280227
    double lpdf = distribution.LogProbabilityDensityFunction(x: 4.2);      // -1.7067405350495708
    double hf = distribution.HazardFunction(x: 4.2);                       // 1.6331065212852196
    double ccdf = distribution.ComplementaryDistributionFunction(x: 4.2);  // 0.11111111111111116
    double icdf = distribution.InverseDistributionFunction(p: cdf);        // 4.1999999999999993
    double smoothing = distribution.Smoothing;                             // 0.67595864392399474
    string str = distribution.ToString();                                  // Fn(x; S)

    Assert.AreEqual(samples, distribution.Samples);
    Assert.AreEqual(0.67595864392399474, smoothing);
    Assert.AreEqual(3.0, mean);
    Assert.AreEqual(2.9999993064186787, median);
    Assert.AreEqual(1.2941176470588236, var);
    Assert.AreEqual(2.1972245773362191, chf);
    Assert.AreEqual(0.88888888888888884, cdf);
    Assert.AreEqual(0.18145628014280227, pdf);
    Assert.AreEqual(-1.7067405350495708, lpdf);
    Assert.AreEqual(1.6331065212852196, hf);
    Assert.AreEqual(0.11111111111111116, ccdf);
    Assert.AreEqual(4.1999999999999993, icdf);
    Assert.AreEqual("Fn(x; S)", str);
}
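// The two constructor tests above differ only in the smoothing bandwidth. The
// default expected value 0.67595864392399474 is consistent with a Silverman-style
// rule of thumb, sigma * (4 / (3n))^(1/5), and the "faulty" 1.9144923416414432 is
// consistent with the same expression with the exponent negated. A minimal sketch
// under those assumptions (FaultySmoothingRule itself is not shown in this file);
// assumes using System and using System.Linq.
private static double SilvermanRuleSketch(double[] observations)
{
    // Unbiased (n - 1) sample standard deviation, computed by hand to stay
    // independent of any particular statistics helper.
    double mean = observations.Average();
    double sigma = Math.Sqrt(observations.Sum(x => (x - mean) * (x - mean))
        / (observations.Length - 1));
    return sigma * Math.Pow(4.0 / (3.0 * observations.Length), 1.0 / 5.0);
}

private static double FaultySmoothingRuleSketch(double[] observations)
{
    // Negating the exponent reproduces the 1.9144923416414432 expected above.
    double mean = observations.Average();
    double sigma = Math.Sqrt(observations.Sum(x => (x - mean) * (x - mean))
        / (observations.Length - 1));
    return sigma * Math.Pow(4.0 / (3.0 * observations.Length), -1.0 / 5.0);
}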
public void WeightedEmpiricalDistributionConstructorTest()
{
    double[] original = { 5, 5, 1, 4, 1, 2, 2, 3, 3, 3, 4, 3, 3, 3, 4, 3, 2, 3 };
    var distribution = new EmpiricalDistribution(original);

    // The weighted samples below describe the same multiset of 18 observations,
    // with each integer weight acting as a repeat count.
    int[] weights = { 2, 1, 1, 1, 2, 3, 1, 3, 1, 1, 1, 1 };
    double[] samples = { 5, 1, 4, 1, 2, 3, 4, 3, 4, 3, 2, 3 };
    var target = new EmpiricalDistribution(samples, weights);

    Assert.AreEqual(distribution.Entropy, target.Entropy, 1e-10);
    Assert.AreEqual(distribution.Mean, target.Mean);
    Assert.AreEqual(distribution.Median, target.Median);
    Assert.AreEqual(distribution.Mode, target.Mode);
    Assert.AreEqual(distribution.Quartiles.Min, target.Quartiles.Min);
    Assert.AreEqual(distribution.Quartiles.Max, target.Quartiles.Max);
    Assert.AreEqual(distribution.Smoothing, target.Smoothing);
    Assert.AreEqual(distribution.StandardDeviation, target.StandardDeviation);
    Assert.AreEqual(distribution.Support.Min, target.Support.Min);
    Assert.AreEqual(distribution.Support.Max, target.Support.Max);
    Assert.AreEqual(distribution.Variance, target.Variance);
    Assert.IsTrue(target.Weights.IsEqual(weights.Divide(weights.Sum())));
    Assert.AreEqual(samples, target.Samples);

    // The weighted and unweighted distributions must agree on every measure,
    // evaluated here on a grid over the support.
    for (double x = 0; x < 6; x += 0.1)
    {
        double actual, expected;

        expected = distribution.ComplementaryDistributionFunction(x);
        actual = target.ComplementaryDistributionFunction(x);
        Assert.AreEqual(expected, actual);

        expected = distribution.CumulativeHazardFunction(x);
        actual = target.CumulativeHazardFunction(x);
        Assert.AreEqual(expected, actual);

        expected = distribution.DistributionFunction(x);
        actual = target.DistributionFunction(x);
        Assert.AreEqual(expected, actual);

        expected = distribution.HazardFunction(x);
        actual = target.HazardFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.InverseDistributionFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        actual = target.InverseDistributionFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        Assert.AreEqual(expected, actual);

        expected = distribution.LogProbabilityDensityFunction(x);
        actual = target.LogProbabilityDensityFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.ProbabilityDensityFunction(x);
        actual = target.ProbabilityDensityFunction(x);
        Assert.AreEqual(expected, actual, 1e-15);

        expected = distribution.QuantileDensityFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        actual = target.QuantileDensityFunction(Accord.Math.Tools.Scale(0, 6, 0, 1, x));
        Assert.AreEqual(expected, actual, 1e-10);
    }
}
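// A minimal sketch of the property tested above: integer weights act as repeat
// counts, so expanding them should reproduce the unweighted distribution exactly.
// Method name is illustrative; assumes using System and using System.Linq.
public static void WeightsAsRepeatCountsSketch()
{
    double[] samples = { 5, 1, 4, 1, 2, 3, 4, 3, 4, 3, 2, 3 };
    int[] weights = { 2, 1, 1, 1, 2, 3, 1, 3, 1, 1, 1, 1 };

    // Expand each sample according to its weight: { 5, 5, 1, 4, 1, 1, ... }.
    double[] expanded = samples
        .SelectMany((x, i) => Enumerable.Repeat(x, weights[i]))
        .ToArray();

    var weighted = new EmpiricalDistribution(samples, weights);
    var repeated = new EmpiricalDistribution(expanded);

    // Both views describe the same multiset of 18 observations.
    Console.WriteLine(weighted.Mean == repeated.Mean);                  // True
    Console.WriteLine(weighted.DistributionFunction(4.2)
        == repeated.DistributionFunction(4.2));                         // True
}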
/// <summary>
/// Potential future exposure: for each forward value date, returns the
/// requested percentiles of the regressed portfolio values on that date.
/// </summary>
public double[,] PFE(Product[] portfolioIn, Date valueDate, Date[] fwdValueDates, double[] percentiles)
{
    CalculateAll(portfolioIn, valueDate, fwdValueDates);

    double[,] pfe = new double[fwdValueDates.Length, percentiles.Length];

    // Each column of regressedValues holds the simulated values for one
    // forward date; read the percentiles off its empirical distribution.
    for (int col = 0; col < regressedValues.GetLength(1); col++)
    {
        EmpiricalDistribution xDist = new EmpiricalDistribution(regressedValues.GetColumn(col));
        for (int percCount = 0; percCount < percentiles.Length; percCount++)
        {
            pfe[col, percCount] = xDist.InverseDistributionFunction(percentiles[percCount]);
        }
    }

    return pfe;
}
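// A self-contained sketch of the per-date quantile extraction done by PFE above,
// using a plain double[,] (paths x dates) in place of the class's regressedValues
// field. GetColumn is an Accord.Math extension; here the column copy is done by
// hand so the sketch depends only on EmpiricalDistribution. Names are illustrative.
public static double[,] QuantileProfileSketch(double[,] simulatedValues, double[] percentiles)
{
    int nPaths = simulatedValues.GetLength(0);
    int nDates = simulatedValues.GetLength(1);
    var profile = new double[nDates, percentiles.Length];

    for (int col = 0; col < nDates; col++)
    {
        // Copy one date's simulated values out of the matrix.
        var column = new double[nPaths];
        for (int row = 0; row < nPaths; row++)
            column[row] = simulatedValues[row, col];

        var dist = new EmpiricalDistribution(column);
        for (int p = 0; p < percentiles.Length; p++)
            profile[col, p] = dist.InverseDistributionFunction(percentiles[p]);
    }

    return profile;
}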
/// <summary>
/// Builds the regression basis of intrinsic functions of each regressor x,
/// i.e. a constant plus the hockey-stick payoffs (K - x)^+ and (x - K)^+,
/// with the strikes K placed at the empirical quantiles i/order of x.
/// </summary>
/// <returns>One row of basis-function values per simulation path.</returns>
private double[][] GetIntrinsic(Date date, int order)
{
    var col = _dates.FindIndex(d => d == date);
    var result = new double[_regressors.GetLength(0)][];

    for (var regressorNumber = 0; regressorNumber < _regressors.GetLength(2); regressorNumber++)
    {
        // For each regressor, partition its range of possible values by placing
        // strikes at the empirical quantiles 1/order, ..., (order - 1)/order.
        var xVec = GetSingleX(col, regressorNumber);
        var xDist = new EmpiricalDistribution(xVec);
        var strikes = new double[order - 1];
        for (var i = 1; i < order; i++)
            strikes[i - 1] = xDist.InverseDistributionFunction((double)i / order);

        // Evaluate this regressor's basis functions on every path.
        for (var row = 0; row < _regressors.GetLength(0); row++)
        {
            double[] rowValues;
            if (regressorNumber == 0)
            {
                // On the pass for the first regressor, create the rows of the
                // result matrix, with the constant basis function in column 0.
                rowValues = new double[1 + order * _regressors.GetLength(2)];
                rowValues[0] = 1;
                result[row] = rowValues;
            }
            else
            {
                rowValues = result[row];
            }

            var x = _regressors[row, col, regressorNumber];
            rowValues[1 + regressorNumber * order] = Math.Max(0, strikes[0] - x);
            for (var orderCounter = 0; orderCounter < order - 1; orderCounter++)
                rowValues[2 + regressorNumber * order + orderCounter] = Math.Max(0, x - strikes[orderCounter]);
        }
    }

    return result;
}
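// A minimal sketch of the per-regressor basis layout produced by GetIntrinsic
// above. For order = 3 it returns [1, (K1 - x)^+, (x - K1)^+, (x - K2)^+], with
// K1, K2 at the empirical 1/3 and 2/3 quantiles of xVec. Single regressor only;
// method name is illustrative and assumes using System.
public static double[] IntrinsicBasisSketch(double[] xVec, double x, int order)
{
    // Strikes at the empirical quantiles i/order, i = 1, ..., order - 1.
    var dist = new EmpiricalDistribution(xVec);
    var strikes = new double[order - 1];
    for (int i = 1; i < order; i++)
        strikes[i - 1] = dist.InverseDistributionFunction((double)i / order);

    var basis = new double[1 + order];
    basis[0] = 1;                                   // constant term
    basis[1] = Math.Max(0, strikes[0] - x);         // downside hockey stick
    for (int k = 0; k < order - 1; k++)
        basis[2 + k] = Math.Max(0, x - strikes[k]); // upside hockey sticks

    return basis;
}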