Example No. 1
        public void CumulativeFunctionTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            double[] x;
            double   actual, expected;

            x        = new double[] { 4.2, 7.0 };
            actual   = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x        = new double[] { 0.0, 0.0 };
            actual   = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x        = new double[] { 7.0, 4.2 };
            actual   = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);
        }
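
The expected values in this test follow from the product rule for independent components; as a reference identity (not part of the source):

    F_{(X_1, X_2)}(x_1, x_2) = P(X_1 \le x_1,\; X_2 \le x_2) = F_{X_1}(x_1)\,F_{X_2}(x_2)
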
Example No. 2
        public static double Run(double[] mts, List<double> cLocal, NormalDistribution n, List<GammaDistribution> g,
                                 double x)
        {
            double retval = 0;
            double sgn    = x <= mts[0] ? 1 : -1;
            int    hival  = 1;

            // Array to hold Gamma CDF values.
            var f = new double[g.Count + 1];

            f[0] = 0.00;

            // Accumulate the weighted Gamma CDF terms for the given parameters.
            for (int i = 1; i <= g.Count; ++i)
            {
                f[i] = 1.00 - Math.Pow(sgn, i) *
                       g[i - 1].DistributionFunction(Math.Pow((x - mts[0]) / Math.Sqrt(2.00 * mts[2]), 2));
                hival  = Math.Abs(f[i] - (1.0 + Math.Pow(-1.0, i + 1))) < 1e-15 ? hival : 0;
                retval = retval + cLocal[i] * f[i];
            }

            // Return the CDF value, making sure the highest possible value is 1.00.
            if (hival == 1 && Math.Abs(n.DistributionFunction(x) - 1.00) < 1e-15)
            {
                return(1.00);
            }
            return(cLocal[0] * (retval + cLocal[cLocal.Count - 1] * n.DistributionFunction(x)));
        }
Example No. 3
            public MyRectangle MoveRectangle(int Epoint, int Esize)
            {
                double             stddev = 0.5;
                NormalDistribution normal = new NormalDistribution(0, stddev);
                var randomNumber          = rand.Next(3);
                var x = ((double)rand.Next((int)(stddev * 3 * 10000 * 2)) - (10000 * stddev * 3)) / 10000;

                if (randomNumber % 3 == 0)
                {
                    var numbX  = (int)Math.Round(normal.DistributionFunction(x) * Epoint);
                    int shiftX = rand.Next(2) % 2 == 1 ? -numbX : numbX;
                    return(new MyRectangle(new IntPoint(this.Center.X + shiftX, this.Center.Y), this.Size));
                }
                else if (randomNumber % 3 == 1)
                {
                    var numbY  = (int)Math.Round(normal.DistributionFunction(x) * Epoint);
                    int shiftY = rand.Next(2) % 2 == 1 ? -numbY : numbY;
                    return(new MyRectangle(new IntPoint(this.Center.X, this.Center.Y + shiftY), this.Size));
                }
                else
                {
                    var numbSize = (int)Math.Round(normal.DistributionFunction(x) * Esize);
                    int sizeDiff = rand.Next(2) % 2 == 1 ? -numbSize : numbSize;
                    return(new MyRectangle(Center, this.Size + sizeDiff));
                }
            }
Example No. 4
        public void ConstructorTest5()
        {
            var normal = new NormalDistribution(mean: 4, stdDev: 4.2);

            double mean   = normal.Mean;                                    // 4.0
            double median = normal.Median;                                  // 4.0
            double mode   = normal.Mode;                                    // 4.0
            double var    = normal.Variance;                                // 17.64

            double cdf  = normal.DistributionFunction(x: 1.4);              // 0.26794249453351904
            double pdf  = normal.ProbabilityDensityFunction(x: 1.4);        // 0.078423391448155175
            double lpdf = normal.LogProbabilityDensityFunction(x: 1.4);     // -2.5456330358182586

            double ccdf = normal.ComplementaryDistributionFunction(x: 1.4); // 0.732057505466481
            double icdf = normal.InverseDistributionFunction(p: cdf);       // 1.4

            double hf  = normal.HazardFunction(x: 1.4);                     // 0.10712736480747137
            double chf = normal.CumulativeHazardFunction(x: 1.4);           // 0.31189620872601354

            string str = normal.ToString(CultureInfo.InvariantCulture);     // N(x; μ = 4, σ² = 17.64)

            Assert.AreEqual(4.0, mean);
            Assert.AreEqual(4.0, median);
            Assert.AreEqual(4.0, mode);
            Assert.AreEqual(17.64, var);
            Assert.AreEqual(0.31189620872601354, chf);
            Assert.AreEqual(0.26794249453351904, cdf);
            Assert.AreEqual(0.078423391448155175, pdf);
            Assert.AreEqual(-2.5456330358182586, lpdf);
            Assert.AreEqual(0.10712736480747137, hf);
            Assert.AreEqual(0.732057505466481, ccdf);
            Assert.AreEqual(1.4, icdf);
            Assert.AreEqual("N(x; μ = 4, σ² = 17.64)", str);

            Assert.AreEqual(Accord.Math.Normal.Function(normal.ZScore(4.2)), normal.DistributionFunction(4.2));
            Assert.AreEqual(Accord.Math.Normal.Derivative(normal.ZScore(4.2)) / normal.StandardDeviation, normal.ProbabilityDensityFunction(4.2), 1e-16);
            Assert.AreEqual(Accord.Math.Normal.LogDerivative(normal.ZScore(4.2)) - Math.Log(normal.StandardDeviation), normal.LogProbabilityDensityFunction(4.2), 1e-15);

            var range1 = normal.GetRange(0.95);
            var range2 = normal.GetRange(0.99);
            var range3 = normal.GetRange(0.01);

            Assert.AreEqual(-2.9083852331961833, range1.Min);
            Assert.AreEqual(10.908385233196183, range1.Max);
            Assert.AreEqual(-5.7706610709715314, range2.Min);
            Assert.AreEqual(13.770661070971531, range2.Max);
            Assert.AreEqual(-5.7706610709715314, range3.Min);
            Assert.AreEqual(13.770661070971531, range3.Max);

            Assert.AreEqual(double.NegativeInfinity, normal.Support.Min);
            Assert.AreEqual(double.PositiveInfinity, normal.Support.Max);

            Assert.AreEqual(normal.InverseDistributionFunction(0), normal.Support.Min);
            Assert.AreEqual(normal.InverseDistributionFunction(1), normal.Support.Max);
        }
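
The asserted quantities are linked by standard identities that the test implicitly exercises; for reference (these are general facts, not taken from the source):

    F^{-1}(F(x)) = x, \qquad h(x) = \frac{f(x)}{1 - F(x)}, \qquad H(x) = -\ln\bigl(1 - F(x)\bigr)

For example, hf = pdf / ccdf ≈ 0.0784234 / 0.7320575 ≈ 0.1071274, and chf = -ln(ccdf) ≈ 0.3118962, matching the asserted values.
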
        public void ConstructorTest5()
        {
            var normal = new NormalDistribution(mean: 4, stdDev: 4.2);

            double mean = normal.Mean;     // 4.0
            double median = normal.Median; // 4.0
            double mode = normal.Mode;     // 4.0
            double var = normal.Variance;  // 17.64

            double cdf = normal.DistributionFunction(x: 1.4); // 0.26794249453351904
            double pdf = normal.ProbabilityDensityFunction(x: 1.4); // 0.078423391448155175
            double lpdf = normal.LogProbabilityDensityFunction(x: 1.4); // -2.5456330358182586

            double ccdf = normal.ComplementaryDistributionFunction(x: 1.4); // 0.732057505466481
            double icdf = normal.InverseDistributionFunction(p: cdf); // 1.4

            double hf = normal.HazardFunction(x: 1.4); // 0.10712736480747137
            double chf = normal.CumulativeHazardFunction(x: 1.4); // 0.31189620872601354

            string str = normal.ToString(CultureInfo.InvariantCulture); // N(x; μ = 4, σ² = 17.64)

            Assert.AreEqual(4.0, mean);
            Assert.AreEqual(4.0, median);
            Assert.AreEqual(4.0, mode);
            Assert.AreEqual(17.64, var);
            Assert.AreEqual(0.31189620872601354, chf);
            Assert.AreEqual(0.26794249453351904, cdf);
            Assert.AreEqual(0.078423391448155175, pdf);
            Assert.AreEqual(-2.5456330358182586, lpdf);
            Assert.AreEqual(0.10712736480747137, hf);
            Assert.AreEqual(0.732057505466481, ccdf);
            Assert.AreEqual(1.4, icdf);
            Assert.AreEqual("N(x; μ = 4, σ² = 17.64)", str);

            Assert.AreEqual(Accord.Math.Normal.Function(normal.ZScore(4.2)), normal.DistributionFunction(4.2));
            Assert.AreEqual(Accord.Math.Normal.Derivative(normal.ZScore(4.2)) / normal.StandardDeviation, normal.ProbabilityDensityFunction(4.2), 1e-16);
            Assert.AreEqual(Accord.Math.Normal.LogDerivative(normal.ZScore(4.2)) - Math.Log(normal.StandardDeviation), normal.LogProbabilityDensityFunction(4.2), 1e-15);

            var range1 = normal.GetRange(0.95);
            var range2 = normal.GetRange(0.99);
            var range3 = normal.GetRange(0.01);

            Assert.AreEqual(-2.9083852331961833, range1.Min);
            Assert.AreEqual(10.908385233196183, range1.Max);
            Assert.AreEqual(-5.7706610709715314, range2.Min);
            Assert.AreEqual(13.770661070971531, range2.Max);
            Assert.AreEqual(-5.7706610709715314, range3.Min);
            Assert.AreEqual(13.770661070971531, range3.Max);
        }
        public void CumulativeFunctionTest2()
        {
            double[] mean = { 4.2 };

            double[,] covariance = { { 1.4 } };

            var baseline = new NormalDistribution(4.2, System.Math.Sqrt(covariance[0, 0]));
            var target   = new MultivariateNormalDistribution(mean, covariance);

            for (int i = 0; i < 10; i++)
            {
                double x = (i - 2) / 10.0;

                {
                    double actual   = target.ProbabilityDensityFunction(x);
                    double expected = baseline.ProbabilityDensityFunction(x);
                    Assert.AreEqual(expected, actual, 1e-10);
                }

                {
                    double actual   = target.DistributionFunction(x);
                    double expected = baseline.DistributionFunction(x);
                    Assert.AreEqual(expected, actual);
                }

                {
                    double actual   = target.ComplementaryDistributionFunction(x);
                    double expected = baseline.ComplementaryDistributionFunction(x);
                    Assert.AreEqual(expected, actual);
                }
            }
        }
Example No. 7
        public void ConstructorTest5()
        {
            var normal = new NormalDistribution(mean: 4, stdDev: 4.2);

            double mean   = normal.Mean;                                    // 4.0
            double median = normal.Median;                                  // 4.0
            double var    = normal.Variance;                                // 17.64

            double cdf  = normal.DistributionFunction(x: 1.4);              // 0.26794249453351904
            double pdf  = normal.ProbabilityDensityFunction(x: 1.4);        // 0.078423391448155175
            double lpdf = normal.LogProbabilityDensityFunction(x: 1.4);     // -2.5456330358182586

            double ccdf = normal.ComplementaryDistributionFunction(x: 1.4); // 0.732057505466481
            double icdf = normal.InverseDistributionFunction(p: cdf);       // 1.4

            double hf  = normal.HazardFunction(x: 1.4);                     // 0.10712736480747137
            double chf = normal.CumulativeHazardFunction(x: 1.4);           // 0.31189620872601354

            string str = normal.ToString(CultureInfo.InvariantCulture);     // N(x; μ = 4, σ² = 17.64)

            Assert.AreEqual(4.0, mean);
            Assert.AreEqual(4.0, median);
            Assert.AreEqual(17.64, var);
            Assert.AreEqual(0.31189620872601354, chf);
            Assert.AreEqual(0.26794249453351904, cdf);
            Assert.AreEqual(0.078423391448155175, pdf);
            Assert.AreEqual(-2.5456330358182586, lpdf);
            Assert.AreEqual(0.10712736480747137, hf);
            Assert.AreEqual(0.732057505466481, ccdf);
            Assert.AreEqual(1.4, icdf);
            Assert.AreEqual("N(x; μ = 4, σ² = 17.64)", str);
        }
Example No. 8
        public void ConstructorTest5()
        {
            var normal = new NormalDistribution(mean: 4, stdDev: 4.2);

            double mean = normal.Mean;     // 4.0
            double median = normal.Median; // 4.0
            double var = normal.Variance;  // 17.64

            double cdf = normal.DistributionFunction(x: 1.4); // 0.26794249453351904
            double pdf = normal.ProbabilityDensityFunction(x: 1.4); // 0.078423391448155175
            double lpdf = normal.LogProbabilityDensityFunction(x: 1.4); // -2.5456330358182586

            double ccdf = normal.ComplementaryDistributionFunction(x: 1.4); // 0.732057505466481
            double icdf = normal.InverseDistributionFunction(p: cdf); // 1.4

            double hf = normal.HazardFunction(x: 1.4); // 0.10712736480747137
            double chf = normal.CumulativeHazardFunction(x: 1.4); // 0.31189620872601354

            string str = normal.ToString(CultureInfo.InvariantCulture); // N(x; μ = 4, σ² = 17.64)

            Assert.AreEqual(4.0, mean);
            Assert.AreEqual(4.0, median);
            Assert.AreEqual(17.64, var);
            Assert.AreEqual(0.31189620872601354, chf);
            Assert.AreEqual(0.26794249453351904, cdf);
            Assert.AreEqual(0.078423391448155175, pdf);
            Assert.AreEqual(-2.5456330358182586, lpdf);
            Assert.AreEqual(0.10712736480747137, hf);
            Assert.AreEqual(0.732057505466481, ccdf);
            Assert.AreEqual(1.4, icdf);
            Assert.AreEqual("N(x; μ = 4, σ² = 17.64)", str);
        }
        public void DistributionFunctionTest1()
        {
            var target = GeneralizedNormalDistribution.Normal(mean: 0.42, stdDev: 4.2);
            var normal = new NormalDistribution(mean: 0.42, stdDev: 4.2);

            for (double x = -10; x < 10; x += 0.0001)
            {
                double actual   = target.DistributionFunction(x);
                double expected = normal.DistributionFunction(x);
                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
Example No. 10
        public void DistributionFunctionTest()
        {
            double x    = 3;
            double mean = 7;
            double dev  = 5;

            NormalDistribution target = new NormalDistribution(mean, dev);

            double expected = 0.211855398583397;
            double actual   = target.DistributionFunction(x);

            Assert.IsFalse(double.IsNaN(actual));
            Assert.AreEqual(expected, actual, 1e-15);
        }
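
The expected value is just the standardized normal CDF; a worked check (not part of the source):

    \Phi\!\left(\frac{x - \mu}{\sigma}\right) = \Phi\!\left(\frac{3 - 7}{5}\right) = \Phi(-0.8) \approx 0.211855
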
Example No. 11
        public void DistributionFunctionTest3()
        {
            double expected, actual;

            // Test small variance
            NormalDistribution target = new NormalDistribution(1.0, double.Epsilon);

            expected = 0;
            actual   = target.DistributionFunction(0);
            Assert.AreEqual(expected, actual);

            expected = 0.5;
            actual   = target.DistributionFunction(1.0);
            Assert.AreEqual(expected, actual);

            expected = 1.0;
            actual   = target.DistributionFunction(1.0 + 1e-15);
            Assert.AreEqual(expected, actual);

            expected = 0.0;
            actual   = target.DistributionFunction(1.0 - 1e-15);
            Assert.AreEqual(expected, actual);
        }
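
The small-variance test relies on the CDF collapsing to a step at the mean; for reference (a general limit, not from the source):

    \lim_{\sigma \to 0^{+}} \Phi\!\left(\frac{x - \mu}{\sigma}\right) = 0 \text{ for } x < \mu, \quad \tfrac{1}{2} \text{ for } x = \mu, \quad 1 \text{ for } x > \mu
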
        public void SetBlackScholesPrice()
        {
            double d1 = (1 / (AnnualVolatility * Math.Sqrt(ValuationTimeSpan.Years())))
                        * (Math.Log(StockPrice / Strike) + (RiskFreeRate + 0.5 * Math.Pow(AnnualVolatility, 2)) * ValuationTimeSpan.Years());

            double d2 = d1 - AnnualVolatility * Math.Sqrt(ValuationTimeSpan.Years());

            NormalDistribution normalDistribution = new NormalDistribution();

            if (OptionType == Enums.OptionType.Call)
            {
                BlackScholesValue = normalDistribution.DistributionFunction(d1) * StockPrice
                                    - normalDistribution.DistributionFunction(d2) * Strike * Math.Exp(-RiskFreeRate * ValuationTimeSpan.Years());
            }
            else if (OptionType == Enums.OptionType.Put)
            {
                BlackScholesValue = -normalDistribution.DistributionFunction(-d1) * StockPrice
                                    + normalDistribution.DistributionFunction(-d2) * Strike * Math.Exp(-RiskFreeRate * ValuationTimeSpan.Years());
            }
            else
            {
                throw new Exception($"Failed: OptionType {OptionType.ToString()} is not supported");
            }
        }
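
The method evaluates the standard Black-Scholes formulas, with S = StockPrice, K = Strike, r = RiskFreeRate, σ = AnnualVolatility and T = ValuationTimeSpan.Years(); for reference:

    d_1 = \frac{\ln(S/K) + (r + \tfrac{1}{2}\sigma^2)\,T}{\sigma\sqrt{T}}, \qquad d_2 = d_1 - \sigma\sqrt{T}

    C = S\,\Phi(d_1) - K e^{-rT}\,\Phi(d_2), \qquad P = K e^{-rT}\,\Phi(-d_2) - S\,\Phi(-d_1)
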
Example No. 13
        public void ConstructorTest2()
        {
            var original = new NormalDistribution(mean: 4, stdDev: 4.2);

            var normal = GeneralContinuousDistribution.FromDensityFunction(
                original.Support, original.ProbabilityDensityFunction);

            for (double i = -10; i < +10; i += 0.1)
            {
                double expected = original.DistributionFunction(i);
                double actual   = normal.DistributionFunction(i);

                double diff = Math.Abs(expected - actual);
                Assert.AreEqual(expected, actual, 1e-6);
            }

            testNormal(normal, 1);
        }
Example No. 14
        //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


        public static void detectAnomalies(int[] results, int training, float tolerance)
        {
            int low;

            low = (results.Length - training) / 2;
            int[] sample = new int[training];
            for (int i = 0; i < training; i++)
            {
                sample[i] = results[low + i];
            }


            double mean;
            double std;


            mean = Measures.Mean(sample);
            std  = Measures.StandardDeviation(sample, mean);

            if (std > 0)
            {
                // Use a standard normal, since zScore below is already standardized.
                NormalDistribution norm = new NormalDistribution(0, 1);

                double zScore;
                for (int i = 0; i < results.Length; i++)
                {
                    zScore = Math.Abs(results[i] - mean) / std;
                    if (2 * (1 - norm.DistributionFunction(zScore)) < (1 - tolerance))
                    {
                        results[i] = int.MinValue;
                    }
                }
            }
            else
            {
                for (int i = 0; i < results.Length; i++)
                {
                    if (results[i] != mean)
                    {
                        results[i] = int.MinValue;
                    }
                }
            }
        }
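
The flagging rule is a two-sided z-test: with Φ the standard normal CDF, an observation x_i is marked anomalous when its two-sided tail probability is small enough, i.e. (a reference sketch, not part of the source)

    z_i = \frac{|x_i - \bar{x}|}{s}, \qquad p_i = 2\,\bigl(1 - \Phi(z_i)\bigr) < 1 - \text{tolerance}
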
Example No. 15
        public async Task<double> CalculatePercentageOfCompletion(double sumEstimations, double sumVariances, double desiredCompletionTime, double zScorePow = 0.5)
        {
            double percentage = 0;
            double zScore     = (desiredCompletionTime - sumEstimations) / Math.Pow(sumVariances, zScorePow);

            if (!double.IsNaN(zScore))
            {
                var normal = new NormalDistribution();
                percentage = normal.DistributionFunction(zScore) * 100;

                percentage = percentage < 0 ? 0 : percentage;
                percentage = percentage > 100 ? 100 : percentage;
            }
            else
            {
                percentage = 100;
            }

            return(Math.Round(percentage, 3));
        }
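
With the default zScorePow = 0.5, the estimate treats the total duration as approximately normal with mean ΣE_i and variance Σσ_i² (the usual PERT-style assumption); the returned percentage is then (a reference sketch, not from the source)

    z = \frac{T_{\text{desired}} - \sum_i E_i}{\sqrt{\sum_i \sigma_i^2}}, \qquad \text{completion probability} = 100\,\Phi(z)

clamped to the range [0, 100].
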
Example No. 16
        public void ConstructorTest3()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Create a normal distribution with mean 2 and sigma 3
            var normal = new NormalDistribution(mean: 2, stdDev: 3);

            // In a normal distribution, the median and
            // the mode coincide with the mean, so

            double mean   = normal.Mean;   // 2
            double mode   = normal.Mode;   // 2
            double median = normal.Median; // 2

            // The variance is the square of the standard deviation
            double variance = normal.Variance; // 3² = 9

            // Let's check what is the cumulative probability of
            // a value less than 3 occurring in this distribution:
            double cdf = normal.DistributionFunction(3); // 0.63055

            // Finally, let's generate 10,000 samples from this distribution
            // and check if they have the specified mean and standard dev.

            double[] samples = normal.Generate(10000);


            double sampleMean = samples.Mean();              // 2.00
            double sampleDev  = samples.StandardDeviation(); // 3.00

            Assert.AreEqual(2, mean);
            Assert.AreEqual(2, mode);
            Assert.AreEqual(2, median);

            Assert.AreEqual(9, variance);
            Assert.AreEqual(10000, samples.Length);
            Assert.AreEqual(2.000, sampleMean, 5e-3);
            Assert.AreEqual(3.000, sampleDev, 5e-3);
        }
        public void ConstructorTest1()
        {
            NormalDistribution normal = new NormalDistribution(4.2, 1.2);
            MultivariateNormalDistribution target = new MultivariateNormalDistribution(new[] { 4.2 }, new[,] { { 1.2 * 1.2 } });

            double[] mean = target.Mean;
            double[] median = target.Median;
            double[] var = target.Variance;
            double[,] cov = target.Covariance;

            double apdf1 = target.ProbabilityDensityFunction(new double[] { 2 });
            double apdf2 = target.ProbabilityDensityFunction(new double[] { 4 });
            double apdf3 = target.ProbabilityDensityFunction(new double[] { 3 });
            double alpdf = target.LogProbabilityDensityFunction(new double[] { 3 });
            double acdf = target.DistributionFunction(new double[] { 3 });
            double accdf = target.ComplementaryDistributionFunction(new double[] { 3 });

            double epdf1 = normal.ProbabilityDensityFunction(2);
            double epdf2 = normal.ProbabilityDensityFunction(4);
            double epdf3 = normal.ProbabilityDensityFunction(3);
            double elpdf = normal.LogProbabilityDensityFunction(3);
            double ecdf = normal.DistributionFunction(3);
            double eccdf = normal.ComplementaryDistributionFunction(3);


            Assert.AreEqual(normal.Mean, target.Mean[0]);
            Assert.AreEqual(normal.Median, target.Median[0]);
            Assert.AreEqual(normal.Variance, target.Variance[0]);
            Assert.AreEqual(normal.Variance, target.Covariance[0, 0]);

            Assert.AreEqual(epdf1, apdf1);
            Assert.AreEqual(epdf2, apdf2);
            Assert.AreEqual(epdf3, apdf3);
            Assert.AreEqual(elpdf, alpdf);
            Assert.AreEqual(ecdf, acdf);
            Assert.AreEqual(eccdf, accdf);
            Assert.AreEqual(1.0 - ecdf, eccdf);
        }
        protected override double InnerProbabilityDensityFunction(double x)
        {
            return 1d / (ub - ua) *
                   (baseDistributions.DistributionFunction((x - nm - ua) / ns) -
                    baseDistributions.DistributionFunction((x - nm - ub) / ns));
        }
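
Assuming baseDistributions is a standard NormalDistribution and that nm, ns, ua and ub are the mean and standard deviation of a normal component and the bounds of an independent uniform component (an assumption based on the field names, which are not shown here), this expression is the convolution density of their sum:

    f(x) = \frac{1}{u_b - u_a}\int_{u_a}^{u_b} \frac{1}{n_s}\,\varphi\!\left(\frac{x - n_m - u}{n_s}\right) du
         = \frac{1}{u_b - u_a}\left[\Phi\!\left(\frac{x - n_m - u_a}{n_s}\right) - \Phi\!\left(\frac{x - n_m - u_b}{n_s}\right)\right]
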
Example No. 19
        public void DistributionFunctionTest3()
        {
            double expected, actual;

            // Test small variance
            NormalDistribution target = new NormalDistribution(1.0, double.Epsilon);

            expected = 0;
            actual = target.DistributionFunction(0);
            Assert.AreEqual(expected, actual);

            expected = 0.5;
            actual = target.DistributionFunction(1.0);
            Assert.AreEqual(expected, actual);

            expected = 1.0;
            actual = target.DistributionFunction(1.0 + 1e-15);
            Assert.AreEqual(expected, actual);

            expected = 0.0;
            actual = target.DistributionFunction(1.0 - 1e-15);
            Assert.AreEqual(expected, actual);
        }
Example No. 20
        public void DistributionFunctionTest()
        {
            double x = 3;
            double mean = 7;
            double dev = 5;

            NormalDistribution target = new NormalDistribution(mean, dev);

            double expected = 0.211855398583397;
            double actual = target.DistributionFunction(x);

            Assert.IsFalse(double.IsNaN(actual));
            Assert.AreEqual(expected, actual, 1e-15);
        }
Example No. 21
        public void ConstructorTest3()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Create a normal distribution with mean 2 and sigma 3
            var normal = new NormalDistribution(mean: 2, stdDev: 3);

            // In a normal distribution, the median and
            // the mode coincide with the mean, so

            double mean = normal.Mean;     // 2
            double mode = normal.Mode;     // 2
            double median = normal.Median; // 2

            // The variance is the square of the standard deviation
            double variance = normal.Variance; // 3² = 9

            // Let's check what is the cumulative probability of
            // a value less than 3 occurring in this distribution:
            double cdf = normal.DistributionFunction(3); // 0.63055

            // Finally, let's generate 1000 samples from this distribution
            // and check if they have the specified mean and standard dev.

            double[] samples = normal.Generate(1000);

            double sampleMean = samples.Mean();             // 1.92
            double sampleDev = samples.StandardDeviation(); // 3.00

            Assert.AreEqual(2, mean);
            Assert.AreEqual(2, mode);
            Assert.AreEqual(2, median);

            Assert.AreEqual(9, variance);
            Assert.AreEqual(1000, samples.Length);
            Assert.AreEqual(1.9245, sampleMean, 1e-4);
            Assert.AreEqual(3.0008, sampleDev, 1e-4);
        }
        public void CumulativeFunctionTest2()
        {
            double[] mean = { 4.2 };

            double[,] covariance = { { 1.4 } };

            var baseline = new NormalDistribution(4.2, System.Math.Sqrt(covariance[0, 0]));
            var target = new MultivariateNormalDistribution(mean, covariance);

            for (int i = 0; i < 10; i++)
            {
                double x = (i - 2) / 10.0;

                {
                    double actual = target.ProbabilityDensityFunction(x);
                    double expected = baseline.ProbabilityDensityFunction(x);
                    Assert.AreEqual(expected, actual, 1e-10);
                }

                {
                    double actual = target.DistributionFunction(x);
                    double expected = baseline.DistributionFunction(x);
                    Assert.AreEqual(expected, actual);
                }

                {
                    double actual = target.ComplementaryDistributionFunction(x);
                    double expected = baseline.ComplementaryDistributionFunction(x);
                    Assert.AreEqual(expected, actual);
                }
            }
        }
Example No. 23
        public void CumulativeFunctionTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            double[] x;
            double actual, expected;

            x = new double[] { 4.2, 7.0 };
            actual = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x = new double[] { 0.0, 0.0 };
            actual = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x = new double[] { 7.0, 4.2 };
            actual = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);
        }
Example No. 24
        public static double Run(string type, double[] prms, double[] a, int fxTD, double rf0, long n, int nbuckets,
                                 int prec, double stdpnr, long npnr, double alpha, int plproc, double[] gradvctr)
        {
            int[]              prtls    = { -1, -1, -1, -1 };
            double[]           k        = new double[fxTD];
            double             maxval   = 0;
            NormalDistribution normdist = new NormalDistribution(0.00, 1.00);

            // Iterate over each time point deriving each gradient entry.
            Trace.WriteLine("");
            Trace.Write("Building gradient ");
            for (int g = 0; g < fxTD; ++g)
            {
                // Construct the constant needed for gradient entries.
                k[g] = Funcs.vp(prms, a[g]) / (2.00 * Funcs.v(prms, a[g])) +
                       Math.Pow(Funcs.mp(prms), 2) / (2.00 * Funcs.vp(prms, a[g]));

                // Populate the gradient vector for this time point.
                prtls[0] = g;
                double grdpnr = GetPNR.Run(type, prms, a, fxTD, rf0, n, nbuckets, prec, prtls, plproc);
                gradvctr[g] = k[g] * (grdpnr - stdpnr);
                Trace.Write(".");

                // Maximum effective absolute value of this gradient vector.
                if (a[g] + gradvctr[g] > 1.00)
                {
                    if (maxval < 1.00 - a[g])
                    {
                        maxval = 1.00 - a[g];
                    }
                }
                else if (a[g] + gradvctr[g] < Funcs.mva(prms) + 0.0001)
                {
                    if (a[g] - (Funcs.mva(prms) + 0.0001) > maxval)
                    {
                        maxval = a[g] - (Funcs.mva(prms) + 0.0001);
                    }
                }
                else if (Math.Abs(gradvctr[g]) > maxval)
                {
                    maxval = Math.Abs(gradvctr[g]);
                }
            }
            Trace.WriteLine(" (Done)");

            // Print the unadjusted gradient vector entries.
            Trace.WriteLine("");
            Trace.Write("Gradient (no adjustment):");
            WrtAry.Run(-1, gradvctr, "Grd", fxTD);

            // If using simulation, test whether each element differs significantly from zero; if not, set it to zero.
            if (type == "sim" && alpha < 1.00)
            {
                maxval = 0;
                for (int g = 0; g < fxTD; ++g)
                {
                    double cmbpnr = (n * (stdpnr + gradvctr[g] / k[g]) + npnr * stdpnr) / (n + npnr);
                    double ts     = (stdpnr + gradvctr[g] / k[g] - stdpnr) /
                                    Math.Sqrt(cmbpnr * (1.00 - cmbpnr) * (1.00 / n + 1.00 / npnr));
                    double pval = 2.00 * Math.Min(normdist.DistributionFunction(ts),
                                                  1.00 - normdist.DistributionFunction(ts));
                    if (pval > alpha)
                    {
                        gradvctr[g] = 0.00; // The element is not different from zero at significance level alpha.
                    }
                    // Maximum effective absolute value of this gradient vector.
                    if (a[g] + gradvctr[g] > 1.00)
                    {
                        if (maxval < 1.00 - a[g])
                        {
                            maxval = 1.00 - a[g];
                        }
                    }
                    else if (a[g] + gradvctr[g] < Funcs.mva(prms) + 0.0001)
                    {
                        if (a[g] - (Funcs.mva(prms) + 0.0001) > maxval)
                        {
                            maxval = a[g] - (Funcs.mva(prms) + 0.0001);
                        }
                    }
                    else if (Math.Abs(gradvctr[g]) > maxval)
                    {
                        maxval = Math.Abs(gradvctr[g]);
                    }
                }

                // Print the adjusted gradient vector entries.
                Trace.WriteLine("");
                Trace.Write("ADJUSTED gradient:");
                WrtAry.Run(-1, gradvctr, "Adj-Grd", fxTD);
            }

            // Display the maximum effective absolute value of the gradient vector (used to determine convergence).
            Trace.WriteLine("");
            Trace.WriteLine($"Maximum effective absolute value of this gradient: {maxval:F10}");

            // This function returns the maximum absolute value of the gradient elements.
            // (Which is used to define the stopping/convergence criteria.)
            return(maxval);
        }
Example No. 25
        /* Return the probability that a standard normal value falls below Value (the standard normal CDF). */
        public double NormDistZ(double Value)
        {
            var NormDist = new NormalDistribution(mean: 0, stdDev: 1);

            return(NormDist.DistributionFunction(Value));
        }
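
A hypothetical usage sketch (the input value is illustrative, not from the source): passing the familiar two-sided 95% critical value should return roughly 0.975.

            double p = NormDistZ(1.96); // ≈ 0.975, since Φ(1.96) ≈ 0.975
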
Example No. 26
        public double PriceCallOption()
        {
            return(modelParams.S * normalDistribution.DistributionFunction(D1) -
                   modelParams.K * Math.Exp(-modelParams.r * modelParams.t) * normalDistribution.DistributionFunction(D2));
        }
Example No. 27
        public static double Run(string type, double[] prms, double[] gpath, int fxTD, double rf0, long n, int nbuckets, int prec, int plproc, double mxgrd, double stdpnr, long npnr, double[] grdnt, double alpha, int nitrs = 0)
        {
            double maxpnr = stdpnr;
            double iter   = 1.00;

            int[] prtls    = { -1, -1, -1, -1 };
            long  maxn     = npnr;
            int   cont     = 1;
            int   fstimpr  = 0;
            int   iindx    = 0;
            int   tryup    = 0;
            int   origindx = 0;
            NormalDistribution normdist = new NormalDistribution(0.00, 1.00);

            // Get initial glidepath provided, assign to both original GP array and prior GP array.
            var prevGP = new double[fxTD];
            var origGP = new double[fxTD];

            for (int y = 0; y < fxTD; ++y)
            {
                origGP[y] = prevGP[y] = gpath[y];
            }

            // Define the step size for climbing. The step size depends on the largest gradient element and grows exponentially.
            // This is a heuristic that has worked well and can be modified if desired. Better step sizes can reduce runtimes.
            for (int i = 1; i <= 10; ++i)
            {
                if (mxgrd >= 1.00 / (10.00 * Math.Pow(10.00, i)) && mxgrd < 1.00 / (10.00 * Math.Pow(10.00, i - 1)))
                {
                    iindx = i;
                    iter  = Math.Pow(Math.Exp(Math.Log(5.00) / 4.00), iindx);
                }
            }

            // Output details for the current iteration.
            Trace.WriteLine(new String('=', 70));
            Trace.WriteLine($"Iteration step size = {iter:F10}");
            Trace.Write("Trying to improve on success probability = ");
            Trace.WriteLine(stdpnr);
            Trace.WriteLine(new String('=', 70));
            // Climb in the direction of the gradient.
            for (int t = 0; cont == 1 || fstimpr == 0; ++t) // Iterate until no more progress is made
            {
                if (cont == 0 && fstimpr == 0)              // If no progress is made reduce step size and try again.
                {
                    // Set the original index value when entering this problematic scenario.
                    if (origindx == 0)
                    {
                        origindx = iindx;
                    }
                    // Adjust glidepath back one iteration since it failed to improve the probability.
                    for (int y = 0; y < fxTD; ++y)
                    {
                        gpath[y] = prevGP[y]; // Reverse final update, since no progress was made.
                    }
                    Trace.WriteLine("");
                    Trace.Write("No Progress Made: Iteration step size changed from ");
                    Trace.Write(iter);
                    if (iindx == 0)
                    {
                        Trace.WriteLine(" to 0.00. (No additional climbing attempts will be made.)");
                        Trace.WriteLine("");
                        Trace.WriteLine("ERROR: No progress can be made, the procedure is stuck. (Step size has been reduced to 0.)");
                        Trace.WriteLine(" You may be operating along the boundary where the process is not well defined or your");
                        Trace.WriteLine(" estimation/approximation precision level is not adequate for your epsilon level.");
                        Trace.WriteLine("");
                        Trace.WriteLine("");
                        Trace.Write("Current Glide-Path: ");
                        WrtAry.Run(maxpnr, gpath, "GP", fxTD);
                        Trace.WriteLine("");
                        Trace.WriteLine("EXITING...Climb()...");
                        Console.Read();
                        Environment.Exit(1);
                    }
                    else if (iindx == 1 && tryup == 5)
                    {
                        iindx = iindx - 1;
                        iter  = iter / 2.00;
                    }
                    else if (iindx > 1 && tryup == 5)
                    {
                        if (iindx == origindx + 5)
                        {
                            iindx = origindx - 1;
                        }
                        else
                        {
                            iindx = iindx - 1;
                        }
                        iter = Math.Pow(Math.Exp(Math.Log(5.00) / 4.00), iindx);
                    }
                    else if (iindx > 1 && tryup < 5)
                    {
                        iindx = iindx + 1;
                        iter  = Math.Pow(Math.Exp(Math.Log(5.00) / 4.00), iindx);
                        tryup = tryup + 1;
                    }
                    Trace.Write(" to ");
                    Trace.Write(iter);
                    Trace.WriteLine(". (Attempting to climb again.)");
                    cont = 1;
                }
                for (int y = 0; y < fxTD; ++y)                // Iterate over glide-path and update it
                {
                    prevGP[y] = gpath[y];                     // Reset the previous glide-path element
                    gpath[y]  = gpath[y] + (iter) * grdnt[y]; // Update each individual glide-path element
                    if (gpath[y] < Funcs.mva(prms) + 0.0001)
                    {
                        gpath[y] = Funcs.mva(prms) + 0.0001; // Stay above MVA and consistent with ThrdPNRdyn() and ThrdPNRsim().
                    }
                    else if (gpath[y] > 1.00)
                    {
                        gpath[y] = 1.00; // Consistent with ThrdPNRdyn() and ThrdPNRsim().
                    }
                }
                double newpnr = GetPNR.Run(type, prms, gpath, fxTD, rf0, n, nbuckets, prec, prtls, plproc);
                Trace.WriteLine("");
                Trace.Write("Base Prob(NR) = ");
                Trace.Write(maxpnr);
                if (type == "sim")
                {
                    Trace.Write(" (N=");
                    Trace.Write(maxn);
                    Trace.Write(")");
                }
                Trace.WriteLine("");
                Trace.Write("New Prob(NR) = ");
                Trace.Write(newpnr);
                if (type == "sim")
                {
                    Trace.Write($" (N={n})");
                }
                else if (newpnr > maxpnr)
                {
                    Trace.Write(" (Better, CONTINUE climbing ...)");
                }
                else
                {
                    Trace.Write(" (Worse, STOP climbing ...)");
                }
                Trace.WriteLine("");

                // If using simulation, conduct a non-inferiority test of the new vs max base GP.
                // =====> Continue to climb if the new GP is at least as good as the max base GP.
                // Otherwise, compare new probability with old and climb while making progress.
                if (type == "sim")
                {
                    double cmbvar = maxpnr * (1.00 - maxpnr) / maxn + newpnr * (1.00 - newpnr) / n;
                    double ts     = (newpnr - maxpnr) / Math.Sqrt(cmbvar);
                    double pval   = normdist.DistributionFunction(ts);
                    Trace.Write("Test Statistic = ");
                    Trace.WriteLine(ts);
                    Trace.Write("P-Value = ");
                    Trace.Write(pval);
                    Trace.Write(" (Alpha=");
                    Trace.Write(alpha);
                    Trace.WriteLine(")");
                    if (pval > alpha)
                    {
                        fstimpr = 1;
                        Trace.WriteLine("=====> Accept Ho (non-inferiority), CONTINUE climbing ...");
                    }
                    else
                    {
                        cont = 0;
                        Trace.WriteLine("=====> Reject Ho (non-inferiority), STOP climbing ...");
                    }
                    // Update PNR and sample size for base GP.
                    if (newpnr > maxpnr)
                    {
                        maxpnr = newpnr;
                        maxn   = n;
                    }
                }
                else if (type == "dp")
                {
                    if (newpnr > maxpnr)
                    {
                        fstimpr = 1;
                        maxpnr  = newpnr;
                    }
                    else
                    {
                        cont = 0;
                    }
                }
                // For lengthy climbing, display the current glidepath at 100 iteration intervals.
                if ((t + 1) % 100 == 0)
                {
                    Trace.WriteLine("");
                    Trace.Write("Current Glide-Path at Iteration: ");
                    Trace.Write(t + 1);
                    WrtAry.Run(newpnr, gpath, "GP", fxTD);
                }
                // Stop when maximum number of iterations has been reached, if specified.
                //=========================================================================
                if (nitrs > 0 && t + 1 == nitrs && cont == 1)
                {
                    Trace.WriteLine("");
                    Trace.WriteLine($"Climbing limit reached at {nitrs} iterations.");
                    cont = 2;
                }
            }

            // Adjust glidepath back one iteration since it failed to improve the probability.
            // (This is only done when climbing failed to improve, not when limit is reached.)
            if (cont != 2)
            {
                for (int y = 0; y < fxTD; ++y)
                {
                    gpath[y] = prevGP[y];
                }
            }
            if (type == "sim")
            {
                Trace.WriteLine("");
                Trace.WriteLine("Resetting the probability (to remove any built-in upward sampling bias) ...");
                maxpnr = ThrdPNRsim.Run(prms, gpath, fxTD, rf0, 2 * n, prtls, plproc);
            }

            // Return the max success probability.
            return(maxpnr);
        }
        public void DistributionFunctionTest1()
        {
            var target = GeneralizedNormalDistribution.Normal(mean: 0.42, stdDev: 4.2);
            var normal = new NormalDistribution(mean: 0.42, stdDev: 4.2);

            for (double x = -10; x < 10; x += 0.0001)
            {
                double actual = target.DistributionFunction(x);
                double expected = normal.DistributionFunction(x);
                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }