/// <summary>
/// Takes a set of normal distributions and computes the probability that each one would produce
/// the minimal value if a point sample was taken from each. Integration is performed by
/// Simpson's 3/8 rule.
/// </summary>
/// <param name="distributions">The array of distributions to compare</param>
/// <param name="iterations">The number of iterations to use in Simpson's 3/8 Rule. Defaults to 150.</param>
/// <returns>An array of probabilities that each distribution will produce the minimum value if a
/// point sample was taken from each. [i] = P(X_i less than min(all X_j))</returns>
public static double[] Simpsons38RuleWithoutNegation(Normal[] distributions, int iterations = 150)
{
    double[] complementProbs = new double[distributions.Length];

    for (int i = 0; i < distributions.Length; i++)
    {
        Normal distribution_i = distributions[i];
        Func<double, double> integrand = x =>
        {
            double product = distribution_i.Density(x);
            for (int j = 0; j < distributions.Length; j++)
            {
                if (j != i)
                {
                    product *= 1 - distributions[j].CumulativeDistribution(x);
                }
            }
            return product;
        };

        complementProbs[i] = NewtonCotes.Simpsons38Rule(integrand,
            distribution_i.Mean - 8 * distribution_i.StdDev,
            distribution_i.Mean + 8 * distribution_i.StdDev,
            iterations);
        //Console.WriteLine($"S38R[{i}]: {complementProbs[i]}");
    }

    return complementProbs;
}
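// A minimal usage sketch (added for illustration; not part of the original source). It assumes the
// NewtonCotes helper used above is available and that `using System.Linq;` is in scope for Sum().
// Exactly one distribution produces the minimum in any joint sample, so the probabilities should
// sum to approximately 1.
public static void Simpsons38RuleExample()
{
    Normal[] dists = { new Normal(10, 2), new Normal(12, 3), new Normal(15, 4) };
    double[] probs = Simpsons38RuleWithoutNegation(dists);
    Console.WriteLine($"Sum over all distributions: {probs.Sum()}"); // expect ~1.0
}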
// Direct computation of the same probabilities using Gauss-Legendre quadrature.
public static double[] ComputeDiscardComplementsGaussLegendre(Normal[] distributions)
{
    double[] complementProbs = new double[distributions.Length];

    for (int i = 0; i < distributions.Length; i++)
    {
        Normal distribution_i = distributions[i];
        Func<double, double> integrand = x =>
        {
            double product = distribution_i.Density(x);
            for (int j = 0; j < distributions.Length; j++)
            {
                if (j != i)
                {
                    product *= 1 - distributions[j].CumulativeDistribution(x);
                }
            }
            return product;
        };

        complementProbs[i] = MathNet.Numerics.Integration.GaussLegendreRule.Integrate(integrand,
            distribution_i.Mean - 8 * distribution_i.StdDev,
            distribution_i.Mean + 8 * distribution_i.StdDev,
            96);
        Console.WriteLine($"GL[{i}]: {complementProbs[i]}");
    }

    return complementProbs;
}
public static double[] ComputeDiscardComplementsSimpsonAlt(Normal[] distributions, int iterations = 150)
{
    double[] complementProbs = new double[distributions.Length];
    distributions = NegateDistributions(distributions); // This change is local to this method

    for (int i = 0; i < distributions.Length; i++)
    {
        Normal distribution_i = distributions[i];
        Func<double, double> integrand = x =>
        {
            double product = distribution_i.Density(x);
            for (int j = 0; j < distributions.Length; j++)
            {
                if (j != i)
                {
                    product *= distributions[j].CumulativeDistribution(x);
                }
            }
            return product;
        };

        complementProbs[i] = Integration.SimpsonsRule.Integrate(integrand,
            distribution_i.Mean - 8 * distribution_i.StdDev,
            distribution_i.Mean + 8 * distribution_i.StdDev,
            iterations);
        //Console.WriteLine($"S38RAlt[{i}]: {complementProbs[i]}");
    }

    return complementProbs;
}
/// <summary>
/// Theta = first derivative of price with respect to time to expiration.
/// </summary>
/// <returns>theta of the option</returns>
public static double BlackScholesTheta(this EuropeanOption option, Date valueDate, double spot,
    double vol, double rate, double div)
{
    var dist = new Normal();
    var T = (double)(option._exerciseDate - valueDate) / 365;
    double d1 = D1(spot, option._strike, vol, rate, div, T);
    double d2 = D2(spot, option._strike, vol, rate, div, T);
    double theta;
    var flag = (double)option._putOrCall;

    double t1 = (Math.Exp(-div * T) * spot * dist.Density(d1) * vol * 0.5) / Math.Sqrt(T);
    double t2 = div * Math.Exp(-div * T) * spot * dist.CumulativeDistribution(flag * d1);
    double t3 = rate * option._strike * Math.Exp(-rate * T) * dist.CumulativeDistribution(flag * d2);

    if (option._putOrCall == PutOrCall.Call)
    {
        theta = -t1 + t2 - t3;
    }
    else
    {
        theta = -t1 - t2 + t3;
    }

    return theta;
}
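// D1 and D2 are referenced above but not defined in this excerpt. A sketch consistent with the
// standard Black-Scholes definitions (an assumption about the missing helpers, not their verbatim code):
private static double D1(double spot, double strike, double vol, double rate, double div, double T)
{
    // d1 = (ln(S/K) + (r - q + sigma^2/2) * T) / (sigma * sqrt(T))
    return (Math.Log(spot / strike) + (rate - div + 0.5 * vol * vol) * T) / (vol * Math.Sqrt(T));
}

private static double D2(double spot, double strike, double vol, double rate, double div, double T)
{
    // d2 = d1 - sigma * sqrt(T)
    return D1(spot, strike, vol, rate, div, T) - vol * Math.Sqrt(T);
}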
public PointPairList getDensityCurve(IList<double> data, double bandwidth)
{
    int intervals = 1000;
    var result = new PointPairList { Capacity = intervals };

    // Generate a baseline of evenly spaced x values, padded by two standard deviations
    var statistics = new DescriptiveStatistics(data);
    double minValue = data.Min() - (2 * statistics.StandardDeviation);
    double maxValue = data.Max() + (2 * statistics.StandardDeviation);
    double interval = (maxValue - minValue) / intervals;
    for (int i = 0; i < intervals; i++)
    {
        result.Add(minValue + i * interval, 0);
    }

    // Bandwidth of 0 means "auto": fall back to Silverman's rule of thumb
    if (bandwidth == 0)
    {
        bandwidth = 1.06 * statistics.StandardDeviation * Math.Pow(data.Count, -1.0 / 5);
    }

    // Accumulate the Gaussian kernel contribution of each data point at every x
    var orderedData = data.OrderBy(o => o);
    Normal nD = new Normal(0, 1);
    foreach (var value in orderedData)
    {
        for (int q = 0; q < intervals; q++)
        {
            result[q].Y += (1 / (data.Count * bandwidth)) * nD.Density((value - result[q].X) / bandwidth);
        }
    }
    return result;
}
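// Usage sketch (illustrative, not from the original source): passing bandwidth = 0 triggers the
// automatic choice above, which is Silverman's rule of thumb, h = 1.06 * sigma * n^(-1/5).
// `chart` and the sample data below are hypothetical:
// var curve = chart.getDensityCurve(new List<double> { 1.0, 1.2, 1.5, 2.0, 2.1, 2.8 }, 0);
// curve then holds 1000 (x, density) points spanning the data range padded by 2 standard deviations.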
// Alternative integral form, integrated with Gauss-Legendre quadrature.
public static double[] ComplementsLegendre(Normal[] distributions, int order)
{
    double[] complementProbs = new double[distributions.Length];
    distributions = NegateDistributions(distributions); // This change is local to this method

    for (int i = 0; i < distributions.Length; i++)
    {
        Normal distribution_i = distributions[i];
        Func<double, double> integrand = x =>
        {
            double product = distribution_i.Density(x);
            for (int j = 0; j < distributions.Length; j++)
            {
                if (j != i)
                {
                    product *= distributions[j].CumulativeDistribution(x);
                }
            }
            return product;
        };

        complementProbs[i] = GaussLegendre.Integrate(integrand,
            distribution_i.Mean - 8 * distribution_i.StdDev,
            distribution_i.Mean + 8 * distribution_i.StdDev,
            order);
        //complementProbs[i] = MathNet.Numerics.Integration.GaussLegendreRule.Integrate(integrand,
        //    distribution_i.Mean - 8 * distribution_i.StdDev, distribution_i.Mean + 8 * distribution_i.StdDev, 96);
        //Console.WriteLine($"GL[{i}]: {complementProbs[i]}");
    }

    return complementProbs;
}
/// <summary>
/// Imputes missing intensity value for each protein
/// </summary>
private void ImputeData(ProteinRowInfo proteinRowInfo, double[] samplesMeanIntensityValue,
    double[] samplesStandardDeviation, List<string> samplesFileNames, double[] missingFactor,
    int[] numberOfIntensityValuesInSample, double meanFraction)
{
    Dictionary<string, double> samplesintensityData = proteinRowInfo.SamplesIntensityData;
    for (int i = 0; i < samplesFileNames.Count; i++)
    {
        // An intensity of 0 marks a missing value for this sample
        if (samplesintensityData[samplesFileNames[i]] == 0)
        {
            double imputedFraction = missingFactor[i] / (numberOfIntensityValuesInSample[i] + missingFactor[i]);
            if (imputedFraction <= 0.5)
            {
                double imputedProbability = imputedFraction / (1 - imputedFraction);
                double standardDeviationFraction = Math.Max(2 * imputedFraction, 0.3);
                double stdDevFraction = 0.6 * (1 - (imputedFraction * imputedFraction));

                // Locate a density set point and translate it into a downshift of the mean
                Normal probabilityDist = new Normal(samplesMeanIntensityValue[i], standardDeviationFraction);
                double probabilitySetPoint = probabilityDist.Density(samplesMeanIntensityValue[i] + stdDevFraction * standardDeviationFraction);
                double yCoordinate = imputedProbability * probabilitySetPoint;
                double deltaX = standardDeviationFraction * stdDevFraction;
                Normal xCoord = new Normal(samplesMeanIntensityValue[i], samplesStandardDeviation[i]);
                double deltaMu = xCoord.InverseCumulativeDistribution(yCoordinate);
                double meanDownshift = deltaMu - deltaX * meanFraction;

                // Sample the imputed intensity from the downshifted normal
                Normal normalDist = new Normal(meanDownshift, standardDeviationFraction);
                double imputeVal = normalDist.Sample();
                samplesintensityData[samplesFileNames[i]] = imputeVal;
            }
        }
    }
}
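// The net effect above is a "downshifted normal" imputation, a common treatment for left-censored
// missing intensities: replacements are drawn from a normal centered below the observed values.
// A minimal standalone sketch of that idea (hypothetical shift and width factors, not the exact
// constants derived above):
Normal MakeImputationDistribution(double sampleMean, double sampleStdDev)
{
    double shift = 1.8; // hypothetical downshift, in standard deviations
    double width = 0.3; // hypothetical width factor
    return new Normal(sampleMean - shift * sampleStdDev, width * sampleStdDev);
}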
public static double[] ComplementsSimpsons38(Normal[] distributions, int steps = 150)
{
    if (steps % 3 != 0)
    {
        steps += 3 - steps % 3; // Round up to a multiple of 3
    }

    double[] complementProbs = new double[distributions.Length];
    distributions = NegateDistributions(distributions); // This change is local to this method

    for (int i = 0; i < distributions.Length; i++)
    {
        Normal distribution_i = distributions[i];
        Func<double, double> integrand = x =>
        {
            double product = distribution_i.Density(x);
            for (int j = 0; j < distributions.Length; j++)
            {
                if (j != i)
                {
                    product *= distributions[j].CumulativeDistribution(x);
                }
            }
            return product;
        };

        complementProbs[i] = NewtonCotes.Simpsons38Rule(integrand,
            distribution_i.Mean - 8 * distribution_i.StdDev,
            distribution_i.Mean + 8 * distribution_i.StdDev,
            steps);
        //Console.WriteLine($"S38[{i}]: {complementProbs[i]}");
    }

    return complementProbs;
}
public void ValidateNormalDensityEquivalence(double location, double scale, double x)
{
    // SkewedGeneralizedT with skew = 0, p = 2, q = infinity reduces to a normal distribution
    var sgt = new SkewedGeneralizedT(location, scale, 0, 2, double.PositiveInfinity);
    var n = new Normal(location, scale);
    AssertHelpers.AlmostEqualRelative(n.Density(x), sgt.Density(x), 8);
    AssertHelpers.AlmostEqualRelative(n.DensityLn(x), sgt.DensityLn(x), 8);
}
/// <summary>
/// Runs a playout with two given controllers and reports the result.
/// </summary>
public static PlayoutResult Playout(GameInstance game, IMobController ai1, IMobController ai2)
{
    var hub = new GameEventHub(game);
    game.MobManager.Teams[TeamColor.Red] = ai1;
    game.MobManager.Teams[TeamColor.Blue] = ai2;

    const int maxIterations = 100;
    int i = 0;
    for (; i < maxIterations && !game.IsFinished; i++)
    {
        game.CurrentController.FastPlayTurn(hub);
        ActionEvaluator.FNoCopy(game, UctAction.EndTurnAction());
    }

    float totalMaxHp = 0;
    float totalCurrentHp = 0;
    foreach (var mobId in game.MobManager.Mobs)
    {
        totalMaxHp += game.MobManager.MobInfos[mobId].MaxHp;
        totalCurrentHp += Math.Max(0, game.State.MobInstances[mobId].Hp);
    }

    int red = 0;
    int blue = 0;

    // Only log when the playout actually hit the iteration cap
    if (i == maxIterations)
    {
        Utils.Log(LogSeverity.Error, nameof(GameEvaluator), $"Playout time limit reached at {maxIterations} rounds");
    }

    if (i < maxIterations && game.VictoryTeam.HasValue)
    {
        if (game.VictoryTeam.Value == TeamColor.Red)
        {
            red++;
        }
        else
        {
            blue++;
        }
        Accounting.IncrementWinner(game.VictoryController);
    }

    var gamePercentage = totalCurrentHp / totalMaxHp;
    Debug.Assert(gamePercentage >= 0);

    var mobsCount = game.MobManager.Mobs.Count;
    var dis = new Normal(mobsCount * 2, mobsCount);
    dis.Density(mobsCount * 2); // Result is discarded; this density evaluation has no effect

    return new PlayoutResult(i, gamePercentage, game.State.AllPlayed, i == maxIterations, red, blue);
}
public double GetNextProbability(int currentTick)
{
    if (currentTick > (round + 1) * totalTicks)
    {
        // We start over.
        round++;
    }
    return normalDis.Density(currentTick - (round * totalTicks));
}
/// <summary>
/// Vega = first derivative of price with respect to volatility.
/// </summary>
/// <returns>vega of the option</returns>
public static double BlackScholesVega(this EuropeanOption option, Date valueDate, double spot,
    double vol, double rate, double div)
{
    var dist = new Normal();
    var T = (double)(option._exerciseDate - valueDate) / 365;
    double d1 = D1(spot, option._strike, vol, rate, div, T);
    double vega = spot * Math.Exp(-div * T) * dist.Density(d1) * Math.Sqrt(T);
    return vega;
}
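// A quick sanity check (illustrative, not from the original source): analytic vega can be verified
// against a central finite difference of a self-contained Black-Scholes call price. Normal.CDF is
// the static MathNet.Numerics helper with signature CDF(mean, stddev, x).
static double CallPrice(double s, double k, double vol, double r, double q, double T)
{
    double d1 = (Math.Log(s / k) + (r - q + 0.5 * vol * vol) * T) / (vol * Math.Sqrt(T));
    double d2 = d1 - vol * Math.Sqrt(T);
    return s * Math.Exp(-q * T) * Normal.CDF(0, 1, d1) - k * Math.Exp(-r * T) * Normal.CDF(0, 1, d2);
}

// Central difference: vega ~= (CallPrice(.., vol + h, ..) - CallPrice(.., vol - h, ..)) / (2 * h)
// for small h (e.g. 1e-4), which should match spot * exp(-q*T) * phi(d1) * sqrt(T) closely.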
float calcSpeed(float angle_in)
{
    // Converts an angle to a speed via a normal probability density.
    // The angle ranges over roughly 1.2 to 3 and is scaled by 10 before being
    // evaluated against a Normal(30, 2) PDF, so the peak speed occurs near angle 3.
    Normal speed_pdf = new Normal(30, 2);

    // Evaluate the probability density (PDF) at the scaled input
    double speed_out = speed_pdf.Density(10 * (double)angle_in);
    //Debug.Log("angle_in" + angle_in);
    //Debug.Log("speed_out: " + speed_out);
    return (float)speed_out;
}
/// <summary>
/// Truncated N(0, 1) to (x1, x2), where P(X <= x1) = trim and P(X <= x2) = 1 - trim
/// </summary>
/// <param name="trim">tail probability</param>
/// <returns>approximately (E[X^2] = 1) / Et[X^2]</returns>
private static double InflationFactor(double trim)
{
    var norm = new Normal(); // N(0, 1)
    double a = norm.InverseCumulativeDistribution(1 - trim);
    double step = 2 * a / 10000;

    // Midpoints of 10000 equal subintervals of the truncation interval (-a, a),
    // where P(X <= -a) = trim and P(X <= a) = 1 - trim
    double[] x1s = Helper.Seq(-a + step / 2, a - step / 2, 10000);

    // Midpoint-rule approximation of Et[X^2], the second moment of the truncated N(0, 1)
    double eX2 = 0.0;
    foreach (double x1 in x1s)
    {
        eX2 += (x1 * x1) * norm.Density(x1);
    }
    eX2 = eX2 * step / (1 - 2 * trim);

    return 1 / eX2; // approx (E[X^2] = 1) / Et[X^2]
    // == 1 / (1 + (-a * dnorm(-a) - a * dnorm(a)) / (1 - 2*trim) - ((dnorm(-a) - dnorm(a)) / (1 - 2*trim))^2)
    // According to http://en.wikipedia.org/wiki/Truncated_normal_distribution
}
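// Closed-form check (added for illustration): for N(0, 1) truncated symmetrically at +/- a,
// the Wikipedia formula cited above reduces to Et[X^2] = 1 - 2*a*phi(a) / (1 - 2*trim),
// since phi(-a) = phi(a) makes the squared term vanish. This validates the midpoint sum directly:
private static double InflationFactorClosedForm(double trim)
{
    var norm = new Normal(); // N(0, 1)
    double a = norm.InverseCumulativeDistribution(1 - trim);
    double eX2 = 1 - 2 * a * norm.Density(a) / (1 - 2 * trim);
    return 1 / eX2;
}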
public int EstimateOverTime(int estimatedtime)
{
    double mean = estimatedtime;
    MathNet.Numerics.Distributions.Normal normalDist = new Normal(mean, _std);

    // Sum d * pdf(d) in steps of 1 over the region above the mean,
    // stopping once the density becomes negligible
    double d = mean + 1.0;
    double sum = 0;
    while (true)
    {
        double density = normalDist.Density(d);
        if (density < 1e-10)
        {
            break;
        }
        sum += d * density;
        d = d + 1.0;
    }

    sum = Math.Round(sum, 0);
    return (int)sum;
}
/// <summary>
/// Uses Gauss-Hermite quadrature on the alternative integral form with an invariant to compute
/// the probability that each of the distributions is the minimum.
/// </summary>
/// <param name="distributions"> The set of distributions to consider </param>
/// <param name="evalPoints"> The set of evaluation points </param>
/// <param name="weights"> The set of weights, in the same order as the eval points </param>
/// <returns> An array of probabilities indexed to match the distribution array </returns>
/// <remarks> This will require up to 3NV special function evaluations, where N is the number of
/// distributions and V is the number of evaluation points. </remarks>
public static double[] ComputeDiscardComplementsGaussHermiteAltInvariant(Normal[] distributions, double[] evalPoints, double[] weights)
{
    if (evalPoints.Length != weights.Length)
    {
        throw new ArgumentException("Error: Evaluation points must have same length as weights.");
    }

    distributions = NegateDistributions(distributions); // This change is local to this method

    // Compute the interval of integration
    double minMean = distributions[0].Mean;
    double maxMean = distributions[0].Mean;
    double maxStdev = 0;
    for (int i = 0; i < distributions.Length; i++)
    {
        if (distributions[i].Mean < minMean) { minMean = distributions[i].Mean; }
        if (distributions[i].Mean > maxMean) { maxMean = distributions[i].Mean; }
        if (distributions[i].StdDev > maxStdev) { maxStdev = distributions[i].StdDev; }
    }

    //double a = (minMean + maxMean) / 2; // Original
    double a = maxMean;
    //double b = (maxMean - minMean) / Math.Sqrt(2); // Original
    //double b = Math.Sqrt(2) * Math.Min((maxMean - minMean) / 2, maxStdev); // Worse
    double b = Math.Sqrt(2) * Math.Min(maxMean, maxStdev); // Better than original

    Normal U = new Normal(a, b); // Original
    //Normal U = new Normal(a, Math.Min((maxMean - minMean) / 2, maxStdev)); // Worse
    //Normal U = new Normal(a, Math.Min(maxMean, maxStdev)); // Better than the original

    // Compute the change of variable function
    Func<double, double> xOfz = z => b * z + a;

    // Compute the vector of constants
    double[] C = new double[evalPoints.Length];
    double[] X = new double[evalPoints.Length];
    for (int i = 0; i < C.Length; i++)
    {
        X[i] = xOfz(evalPoints[i]);
        C[i] = weights[i] / U.Density(X[i]);
        for (int j = 0; j < distributions.Length; j++)
        {
            C[i] *= distributions[j].CumulativeDistribution(X[i]);
        }
    }

    // --- Perform the Integration ---
    double[] complementProbs = new double[distributions.Length];
    for (int i = 0; i < distributions.Length; i++)
    {
        complementProbs[i] = 0;
        for (int j = 0; j < C.Length; j++)
        {
            double CDFij = distributions[i].CumulativeDistribution(X[j]);
            if (CDFij > 0)
            {
                complementProbs[i] += distributions[i].Density(X[j]) * C[j] / CDFij;
            }
        }
        complementProbs[i] /= Math.Sqrt(Math.PI);
        Console.WriteLine($"GHAltInv[{i}]: {complementProbs[i]}");
    }

    return complementProbs;
}
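// Usage sketch: elsewhere in this codebase the method is driven with precomputed 70-point
// Gauss-Hermite tables (see the test code further below):
// double[] probs = NormalComparison.ComputeDiscardComplementsGaussHermiteAltInvariant(
//     distributions, GaussHermite.evaluationPoints70opt, GaussHermite.weights70opt);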
/// <summary>
/// Run example
/// </summary>
/// <a href="http://en.wikipedia.org/wiki/Normal_distribution">Normal distribution</a>
public void Run()
{
    // 1. Initialize the new instance of the Normal distribution class with parameters Mean = 0, StdDev = 1
    var normal = new Normal(0, 1);
    Console.WriteLine(@"1. Initialize the new instance of the Normal distribution class with parameters Mean = {0}, StdDev = {1}", normal.Mean, normal.StdDev);
    Console.WriteLine();

    // 2. Distribution properties:
    Console.WriteLine(@"2. {0} distribution properties:", normal);

    // Cumulative distribution function
    Console.WriteLine(@"{0} - Cumulative distribution at location '0.3'", normal.CumulativeDistribution(0.3).ToString(" #0.00000;-#0.00000"));

    // Probability density
    Console.WriteLine(@"{0} - Probability density at location '0.3'", normal.Density(0.3).ToString(" #0.00000;-#0.00000"));

    // Log probability density
    Console.WriteLine(@"{0} - Log probability density at location '0.3'", normal.DensityLn(0.3).ToString(" #0.00000;-#0.00000"));

    // Entropy
    Console.WriteLine(@"{0} - Entropy", normal.Entropy.ToString(" #0.00000;-#0.00000"));

    // Largest element in the domain
    Console.WriteLine(@"{0} - Largest element in the domain", normal.Maximum.ToString(" #0.00000;-#0.00000"));

    // Smallest element in the domain
    Console.WriteLine(@"{0} - Smallest element in the domain", normal.Minimum.ToString(" #0.00000;-#0.00000"));

    // Mean
    Console.WriteLine(@"{0} - Mean", normal.Mean.ToString(" #0.00000;-#0.00000"));

    // Median
    Console.WriteLine(@"{0} - Median", normal.Median.ToString(" #0.00000;-#0.00000"));

    // Mode
    Console.WriteLine(@"{0} - Mode", normal.Mode.ToString(" #0.00000;-#0.00000"));

    // Variance
    Console.WriteLine(@"{0} - Variance", normal.Variance.ToString(" #0.00000;-#0.00000"));

    // Standard deviation
    Console.WriteLine(@"{0} - Standard deviation", normal.StdDev.ToString(" #0.00000;-#0.00000"));

    // Skewness
    Console.WriteLine(@"{0} - Skewness", normal.Skewness.ToString(" #0.00000;-#0.00000"));
    Console.WriteLine();

    // 3. Generate 10 samples
    Console.WriteLine(@"3. Generate 10 samples");
    for (var i = 0; i < 10; i++)
    {
        Console.Write(normal.Sample().ToString("N05") + @" ");
    }
    Console.WriteLine();
    Console.WriteLine();

    // 4. Generate 100000 samples of the Normal(0, 1) distribution and display histogram
    Console.WriteLine(@"4. Generate 100000 samples of the Normal(0, 1) distribution and display histogram");
    var data = new double[100000];
    for (var i = 0; i < data.Length; i++)
    {
        data[i] = normal.Sample();
    }
    ConsoleHelper.DisplayHistogram(data);
    Console.WriteLine();

    // 5. Generate 100000 samples of the Normal(-10, 0.01) distribution and display histogram
    Console.WriteLine(@"5. Generate 100000 samples of the Normal(-10, 0.01) distribution and display histogram");
    normal.Mean = -10;
    normal.StdDev = 0.01;
    for (var i = 0; i < data.Length; i++)
    {
        data[i] = normal.Sample();
    }
    ConsoleHelper.DisplayHistogram(data);
}
public IEnumerable<double> SolveOptimizationProblemWithOutliersFiltering(out List<List<double>> parametersHistory,
    out List<List<SpacePointsType>> filteredEventsListsHistory, double precision = 1.0e-8d)
{
    parametersHistory = new List<List<double>>();
    filteredEventsListsHistory = new List<List<SpacePointsType>>();

    int startingEventsSetLength = mEventsSpaceVector.Count();
    int prevEventsSetLength = mEventsSpaceVector.Count();
    int newEventsSetLength = mEventsSpaceVector.Count();
    int epoch = 0;
    bool success = false;

    while (!success)
    {
        nParametersSpacePoint = SolveOptimizationProblem(precision);
        parametersHistory.Add(nParametersSpacePoint.ToList());

        // Find outlier indexes
        List<double> funcDevVector = this.ObjectiveDeviations(nParametersSpacePoint).ToList();

        #region debugging presentations
#if DEBUG
        //HistogramDataAndProperties hist = new HistogramDataAndProperties(
        //    DenseVector.OfEnumerable(funcDevVector), 50);
        //HistogramCalcAndShowForm histForm = new HistogramCalcAndShowForm("", null);
        //histForm.HistToRepresent = hist;
        //histForm.Represent();
        //histForm.SaveToImage("D:\\_gulevlab\\SkyImagesAnalysis_appData\\RV-ANS-31-test\\hist_" + epoch.ToString("D3") + ".jpg");
#endif
        #endregion debugging presentations

        // Mark a deviation as an outlier when its density under the fitted normal drops below
        // 20% of the density at the mean
        List<bool> lIsOutlier = mEventsSpaceVector.ToList().ConvertAll<bool>(val => false);
        Normal normDistrib = Normal.Estimate(funcDevVector, null);
        lIsOutlier = funcDevVector.ConvertAll<bool>(devVal =>
            normDistrib.Density(devVal) <= 0.2d * normDistrib.Density(normDistrib.Mean));

        #region DEBUG: manually set 0.03 fraction of set to true
        //List<int> lOutliersIndexes = RandPerm(mEventsSpaceVector.Count()).ToList();
        //int maxOutlierPosition = Convert.ToInt32(Math.Floor(0.03d*lOutliersIndexes.Count) + 1);
        //for (int i = 0; i < maxOutlierPosition; i++)
        //{
        //    lIsOutlier[lOutliersIndexes[i]] = true;
        //}
        #endregion DEBUG: manually set 0.03 fraction of set to true

        // Filter outliers
        mEventsSpaceVector = new List<SpacePointsType>(mEventsSpaceVector.Where((val, idx) => !lIsOutlier[idx]));
        mFittingValuesVector = new List<double>(mFittingValuesVector.Where((val, idx) => !lIsOutlier[idx]));
        filteredEventsListsHistory.Add(mEventsSpaceVector.ToList());
        newEventsSetLength = mEventsSpaceVector.Count();

        // Stop once the filtered set shrinks by no more than 0.5% in an epoch
        if ((double)Math.Abs(newEventsSetLength - prevEventsSetLength) / (double)prevEventsSetLength <= 0.005)
        {
            success = true;
            epoch++;

            #region debugging presentations
#if DEBUG
            //hist = new HistogramDataAndProperties(DenseVector.OfEnumerable(funcDevVector), 50);
            //histForm.HistToRepresent = hist;
            //histForm.Represent();
            //histForm.SaveToImage("D:\\_gulevlab\\SkyImagesAnalysis_appData\\RV-ANS-31-test\\hist_" + epoch.ToString("D3") + ".jpg");
#endif
            #endregion debugging presentations
        }

        prevEventsSetLength = newEventsSetLength;
        epoch++;
    }

    return nParametersSpacePoint;
}
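// Note on the 0.2 density cutoff above (derivation added for clarity): for a normal distribution,
// Density(x) / Density(mean) = exp(-(x - mean)^2 / (2 * sigma^2)), so the test
// Density(devVal) <= 0.2 * Density(mean) is equivalent to a distance threshold:
// double cutoff = normDistrib.StdDev * Math.Sqrt(2 * Math.Log(5)); // ~1.79 standard deviations
// bool isOutlier = Math.Abs(devVal - normDistrib.Mean) >= cutoff;  // same test, no Density calls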
public static void TestNumericalIntegration(Random rand)
{
    BogaertGLWrapper.Initialize();

    /*
     * double[] nodesAndWeightsRaw = BogaertGLWrapper.GetGLNodesAndWeights(10000000);
     *
     * // Count how many can be culled
     * int count = 0;
     * for (int i = 0; i < nodesAndWeightsRaw.Length / 2; i++)
     * {
     *     double x = nodesAndWeightsRaw[2 * i];
     *     double w = nodesAndWeightsRaw[2 * i + 1];
     *     if (x < 0 && w * Normal.CDF(0, 1.0 / 8, x) < 10E-18) { count++; }
     *     else if (x > 0 && w * Normal.PDF(0, 1.0 / 8, x) < 10E-18) { count++; }
     * }
     * Console.WriteLine($"Could cull {count} out of {nodesAndWeightsRaw.Length / 2} evaluation points.");
     *
     * Console.ReadKey();
     */

    // Parameters
    int numberOfDistributions = 20;
    double minMeanFitness = 8;
    double maxMeanFitness = 60;
    double minStDev = 6;
    double maxStDev = 25;

    // Computed values
    double fitnessRange = maxMeanFitness - minMeanFitness;
    double stDevRange = maxStDev - minStDev;

    // Set up the distributions and pick the one with the biggest mean to be i
    Normal[] distributions = new Normal[numberOfDistributions];
    Normal distribution_i = new Normal();
    double minMean = -1 * minMeanFitness, maxMean = -1 * maxMeanFitness; // Starting points for finding min and max in the set (not an error)
    for (int i = 0; i < distributions.Length; i++)
    {
        distributions[i] = new Normal(-1 * (minMeanFitness + fitnessRange * rand.NextDouble()), minStDev + stDevRange * rand.NextDouble());
        if (distributions[i].Mean > maxMean) { maxMean = distributions[i].Mean; }
        if (distributions[i].Mean < minMean)
        {
            minMean = distributions[i].Mean;
            distribution_i = distributions[i];
        }
        Console.WriteLine($"Dist {i}: mean {distributions[i].Mean}, stdev {distributions[i].StdDev}");
    }

    Func<double, double> altForm = x =>
    {
        double cdfi = distribution_i.CumulativeDistribution(x);
        if (cdfi == 0 || double.IsNaN(cdfi)) { return 0; }
        double product = distribution_i.Density(x) / cdfi;
        for (int i = 0; i < distributions.Length; i++)
        {
            product *= distributions[i].CumulativeDistribution(x);
        }
        return product;
    };

    /*
     * double correctResult = SimpsonsRule.Integrate(altForm, minMean - 3 * maxStDev, maxMean + 3 * maxStDev, 600);
     * Console.WriteLine($"Simp 3/8 (600): 1 - P(D_i) = {correctResult}");
     *
     * double gaussLegendreResult = MathNet.Numerics.Integration.GaussLegendreRule.Integrate(altForm, minMean - 3 * maxStDev, maxMean + 3 * maxStDev, 128);
     * Console.WriteLine($"Gauss-Legendre 128: 1 - P(D_i) = {gaussLegendreResult}");
     */

    double[] discardProbs = new double[distributions.Length];

    /*
     * for (int i = 0; i < distributions.Length; i++)
     * {
     *     distribution_i = distributions[i];
     *     discardProbs[i] = SimpsonsRule.Integrate(altForm, minMean - 8 * maxStDev, maxMean + 8 * maxStDev, 1500);
     *     Console.WriteLine($"Simp 3/8 (1500): 1 - P(D_{i}) = {discardProbs[i]}");
     * }
     */

    List<double> discardProbList = new List<double>(discardProbs);
    discardProbList.Sort();
    double sum = 0;
    for (int i = 0; i < discardProbList.Count; i++)
    {
        //Console.WriteLine($"Sorted: {discardProbList[i]}");
        sum += discardProbList[i];
    }
    // Console.WriteLine($"Sum of probabilities is {sum}");

    /*
     * NormalComparison.ComputeDiscardComplementsSimpson(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussHermite(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussLegendre(distributions);
     * NormalComparison.ComputeDiscardComplementsSimpsonAlt(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussHermiteAlt(distributions);
     * NormalComparison.ComputeDiscardComplementsGaussLegendreAlt(distributions);
     */

    List<double> output;
    sum = 0;
    output = new List<double>(NormalComparison.ComputeDiscardComplementsSimpson38AltInvariant(distributions, 450));
    output.Sort();
    for (int i = 0; i < output.Count; i++)
    {
        sum += output[i];
    }
    Console.WriteLine($"Sum of probabilities is {sum}");

    /*
     * Console.WriteLine($"Dist 1 : 1/sqrt3 & 2 : 1/sqrt3");
     * distributions = new Normal[] { new Normal(1, 1.0/Math.Sqrt(3)), new Normal(2, 1.0/ Math.Sqrt(3)) };
     * Console.WriteLine($"Exact = {NormalComparison.ComputeDiscardProbabilityPairwiseExact(distributions[0], distributions[1])}");
     * NormalComparison.ComputeDiscardComplementsGaussLegendreAlt(distributions);
     * NormalComparison.ComputeDiscardComplementsSimpson38AltInvariant(distributions, 210);
     * Console.WriteLine($"4 Dists 1 : 1 & 2 : 1");
     * distributions = new Normal[10];
     * for (int i = 0; i < distributions.Length - 1; i++) { distributions[i] = new Normal(1, 1); }
     * distributions[distributions.Length - 1] = new Normal(2, 1);
     */

    /*
     * NormalComparison.ComputeDiscardComplementsGaussLegendreAlt(distributions);
     * output = new List<double>(NormalComparison.ComputeDiscardComplementsSimpson38AltInvariant(distributions, 210));
     * output.Sort();
     * sum = 0;
     * for (int i = 0; i < output.Count; i++)
     * {
     *     sum += output[i];
     * }
     * Console.WriteLine($"Sum of probabilities is {sum}");
     */

    output = new List<double>(NormalComparison.ComputeDiscardComplementsGaussLegendreAltInvariant(distributions, GaussLegendre.evalPoints75opt, GaussLegendre.weights75opt));
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++)
    {
        sum += output[i];
    }
    Console.WriteLine($"Sum of probabilities is {sum}");

    output = new List<double>(NormalComparison.ComputeDiscardComplementsGaussHermiteAltInvariant(distributions, GaussHermite.evaluationPoints70opt, GaussHermite.weights70opt));
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++)
    {
        sum += output[i];
    }
    Console.WriteLine($"Sum of probabilities is {sum}");

    output = new List<double>(NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariant(distributions, 450));
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++)
    {
        sum += output[i];
    }
    Console.WriteLine($"Sum of probabilities is {sum}");

    System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
    watch.Start();
    double[] bigTest = NormalComparison.ComputeDiscardComplementsClenshawCurtisAltInvariantAutomatic(distributions);
    watch.Stop();
    sum = 0;
    for (int i = 0; i < bigTest.Length; i++)
    {
        //Console.WriteLine($"CCAltInvAuto[{i}]: {bigTest[i]}");
        sum += bigTest[i];
    }
    Console.WriteLine($"Sum of probabilities is {sum}");
    Console.WriteLine($"Total Error lower bound: {Math.Abs(sum - 1)}");
    Console.WriteLine($"Time: {watch.Elapsed.TotalMilliseconds}ms");

    discardProbList = new List<double>(bigTest);
    discardProbList.Sort();
    {
        double certainty = 1;
        int idx = 0;
        while (true)
        {
            double newval = certainty - discardProbList[idx];
            if (newval < 0.95) { break; }
            certainty = newval;
            idx++;
        }
        Console.WriteLine($"Can discard {idx} distributions with 95% certainty");
    }

    watch.Restart();
    bigTest = NormalComparison.ComputeDiscardComplementsSimpson38AltInvariantAutomatic(distributions);
    watch.Stop();
    output = new List<double>(bigTest);
    output.Sort();
    sum = 0;
    for (int i = 0; i < output.Count; i++)
    {
        sum += output[i];
    }
    Console.WriteLine($"S38 Sum of probabilities is {sum}");
    Console.WriteLine($"S38 Time: {watch.Elapsed.TotalMilliseconds}ms");
}
public void UpdateDistributionChart(CartesianChart inChart, List<double> inVals)
{
    // Clears the chart's X Labels
    inChart.AxisX[0].Labels.Clear();

    ColumnSeries colSeries = inChart.Series.FirstOrDefault(a => a is ColumnSeries) as ColumnSeries;
    // Clears the chart's Series' data
    colSeries.Values.Clear();

    // Nothing in the list
    if (inVals.Count < 2) // 0 or 1
    {
        // Hides the chart.
        inChart.Visibility = Visibility.Hidden;
        return;
    }

    double mean = inVals.Mean();
    double stDev = inVals.StandardDeviation();
    double max = inVals.Max();
    double min = inVals.Min();

    if (double.IsNaN(mean) || double.IsNaN(stDev) || max == min)
    {
        // Hides the chart.
        inChart.Visibility = Visibility.Hidden;
        return;
    }

    // Displays the chart
    inChart.Visibility = Visibility.Visible;

    double fullRange = max - min;
    int colDivs = 20;
    Dictionary<double, int> colValDict = new Dictionary<double, int>();

    // Initializes the dictionary
    for (int i = 0; i < colDivs; i++)
    {
        double rMin = min + ((fullRange / colDivs) * (double)i);
        double rMax = min + ((fullRange / colDivs) * (double)(i + 1));
        double rMid = ((rMax - rMin) / 2d) + rMin;
        colValDict.Add(rMid, 0);
    }

    // Counts the number of elements in each bracket
    foreach (double val in inVals)
    {
        // Finds the closest column
        var kvp = colValDict.AsEnumerable().OrderBy(inPair => Math.Abs(val - inPair.Key)).First();
        colValDict[kvp.Key]++;
    }

    // Sets the labels of the X axis
    inChart.AxisX[0].Separator.Step = 1;
    inChart.AxisX[0].LabelsRotation = -90d;
    ((List<string>)inChart.AxisX[0].Labels).AddRange(colValDict.Keys.Select(b => $"{b:+0.0e+00;-0.0e+00;0.0}"));

    // Sets the column chart's data
    colSeries.Values.AddRange(colValDict.Values.Cast<object>());

    // Deletes the previous Normal Distribution Line Data From the Chart
    if (inChart.Series.Count > 1)
    {
        inChart.Series.RemoveAt(1);
    }
    if (inChart.AxisY.Count > 1)
    {
        inChart.AxisY.RemoveAt(1);
    }
    if (inChart.AxisX.Count > 1)
    {
        inChart.AxisX.RemoveAt(1);
    }

    // Working with the normal distribution
    Normal n = new Normal(mean, stDev);
    ChartValues<ObservablePoint> normalValDict = new ChartValues<ObservablePoint>();

    // Fills the dictionary
    int normCount = (colDivs * 2);
    double normStepSize = (max - min) / normCount;
    for (int i = 0; i < (normCount + 1); i++)
    {
        double val = min + normStepSize * i;
        normalValDict.Add(new ObservablePoint(val, n.Density(val)));
    }

    inChart.AxisX.Add(new Axis()
    {
        //Labels = new List<string>(),
        ShowLabels = false,
        Separator = new Separator() { IsEnabled = false },
        Sections = new SectionsCollection()
        {
            new AxisSection()
            {
                Value = mean - stDev,
                SectionWidth = stDev,
                Fill = new SolidColorBrush() { Color = Colors.DarkOrange, Opacity = 0.3 },
                //Stroke = new SolidColorBrush(){ Color = Colors.DarkRed, Opacity = 1d},
                //StrokeThickness = 2d,
            },
            new AxisSection()
            {
                Value = mean + stDev,
                SectionWidth = -stDev,
                Fill = new SolidColorBrush() { Color = Colors.DarkOrange, Opacity = 0.3 },
                //Stroke = new SolidColorBrush(){ Color = Colors.DarkRed, Opacity = 1d},
                //StrokeThickness = 2d,
            },
            new AxisSection()
            {
                Value = mean,
                Stroke = new SolidColorBrush() { Color = Colors.DarkRed, Opacity = 1d },
                StrokeThickness = 2d,
            }
        },
        MinValue = min,
        MaxValue = max,
    });

    inChart.AxisY.Add(new Axis()
    {
        Position = AxisPosition.RightTop,
        ShowLabels = false,
        Separator = new Separator() { IsEnabled = false },
    });

    inChart.Series.Add(new LineSeries()
    {
        Values = normalValDict,
        PointForeground = AppSS.FirstReferencedWindow.Resources["EmsPanelBorder_Green"] as SolidColorBrush,
        Foreground = AppSS.FirstReferencedWindow.Resources["EmsPanelBorder_Green"] as SolidColorBrush,
        Stroke = AppSS.FirstReferencedWindow.Resources["EmsPanelBorder_Green"] as SolidColorBrush,
        StrokeThickness = 1d,
        Fill = new SolidColorBrush() { Opacity = 0d },
        PointGeometrySize = 0d,
        LineSmoothness = 0.5d,
        ScalesYAt = 1,
        ScalesXAt = 1,
    });
}